From 2f4212b80a86724fbe73e83c8063f8deccd87f48 Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Mon, 14 May 2018 19:56:50 +0200
Subject: [PATCH 01/74] Fold RestGetAllSettingsAction in RestGetSettingsAction
 (#30561)

We currently have a separate endpoint for retrieving settings from all
indices. We introduced such an endpoint when removing comma-separated
feature parsing for GetIndicesAction. RestGetAllSettingsAction duplicates
the code to print out the response that we already have in
GetSettingsResponse (since it became a ToXContentObject), and it uses the
get index API internally instead of the get settings API. The response is
the same either way, so we can fold get all settings and get settings into
a single API, which is what this commit does.
---
 .../elasticsearch/action/ActionModule.java    |   2 -
 .../indices/RestGetAllSettingsAction.java     | 121 ------------------
 .../admin/indices/RestGetSettingsAction.java  |   5 +-
 3 files changed, 1 insertion(+), 127 deletions(-)
 delete mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java

diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index 42ff432240381..fa4d751a54aed 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -253,7 +253,6 @@ import org.elasticsearch.rest.action.admin.indices.RestGetAliasesAction;
 import org.elasticsearch.rest.action.admin.indices.RestGetAllAliasesAction;
 import org.elasticsearch.rest.action.admin.indices.RestGetAllMappingsAction;
-import org.elasticsearch.rest.action.admin.indices.RestGetAllSettingsAction;
 import org.elasticsearch.rest.action.admin.indices.RestGetFieldMappingAction;
 import org.elasticsearch.rest.action.admin.indices.RestGetIndexTemplateAction;
 import org.elasticsearch.rest.action.admin.indices.RestGetIndicesAction;
@@ -558,7 +557,6 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster) {
         registerHandler.accept(new RestGetAllAliasesAction(settings, restController));
         registerHandler.accept(new RestGetAllMappingsAction(settings, restController));
-        registerHandler.accept(new RestGetAllSettingsAction(settings, restController, indexScopedSettings, settingsFilter));
         registerHandler.accept(new RestGetIndicesAction(settings, restController, indexScopedSettings, settingsFilter));
         registerHandler.accept(new RestIndicesStatsAction(settings, restController));
         registerHandler.accept(new RestIndicesSegmentsAction(settings, restController));
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java
deleted file mode 100644
index f51cee37ad3f0..0000000000000
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.rest.action.admin.indices;
-
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature;
-import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.settings.IndexScopedSettings;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.rest.BaseRestHandler;
-import org.elasticsearch.rest.BytesRestResponse;
-import org.elasticsearch.rest.RestController;
-import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.rest.action.RestBuilderListener;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-
-import static org.elasticsearch.rest.RestRequest.Method.GET;
-import static org.elasticsearch.rest.RestRequest.Method.HEAD;
-import static org.elasticsearch.rest.RestStatus.OK;
-
-/**
- * The REST handler for retrieving all settings
- */
-public class RestGetAllSettingsAction extends BaseRestHandler {
-
-    private final IndexScopedSettings indexScopedSettings;
-    private final SettingsFilter settingsFilter;
-
-    public RestGetAllSettingsAction(final Settings settings, final RestController controller,
-                                    final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter) {
-        super(settings);
-        this.indexScopedSettings = indexScopedSettings;
-        controller.registerHandler(GET, "/_settings", this);
-        this.settingsFilter = settingsFilter;
-    }
-
-    @Override
-    public String getName() {
-        return "get_all_settings_action";
-    }
-
-    @Override
-    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        final GetIndexRequest getIndexRequest = new GetIndexRequest();
-        getIndexRequest.indices(Strings.EMPTY_ARRAY);
-        getIndexRequest.features(Feature.SETTINGS);
-        getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions()));
-        getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local()));
-        getIndexRequest.humanReadable(request.paramAsBoolean("human", false));
-        // This is required so the "flat_settings" parameter counts as consumed
-        request.paramAsBoolean("flat_settings", false);
-        final boolean defaults = request.paramAsBoolean("include_defaults", false);
-        return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener<GetIndexResponse>(channel) {
-
-            @Override
-            public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception {
-                builder.startObject();
-                {
-                    for (final String index : response.indices()) {
-                        builder.startObject(index);
-                        {
-                            writeSettings(response.settings().get(index), builder, request, defaults);
-                        }
-                        builder.endObject();
-                    }
-                }
-                builder.endObject();
-
-                return new BytesRestResponse(OK, builder);
-            }
-
-
-            private void writeSettings(final Settings settings, final XContentBuilder builder,
-                                       final Params params, final boolean defaults) throws IOException {
-                builder.startObject("settings");
-                {
-                    settings.toXContent(builder, params);
-                }
-                builder.endObject();
-                if (defaults) {
-                    builder.startObject("defaults");
-                    {
-                        settingsFilter
-                            .filter(indexScopedSettings.diff(settings, RestGetAllSettingsAction.this.settings))
-                            .toXContent(builder, request);
-                    }
-                    builder.endObject();
-                }
-            }
-        });
-    }
-
-}
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
index 9791994c773e2..d9fa50cf9410d 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
@@ -19,16 +19,12 @@

 package org.elasticsearch.rest.action.admin.indices;

-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.BytesRestResponse;
@@ -46,6 +42,7 @@ public class RestGetSettingsAction extends BaseRestHandler {

     public RestGetSettingsAction(Settings settings, RestController controller) {
         super(settings);
+        controller.registerHandler(GET, "/_settings", this);
         controller.registerHandler(GET, "/_settings/{name}", this);
         controller.registerHandler(GET, "/{index}/_settings", this);
         controller.registerHandler(GET, "/{index}/_settings/{name}", this);

From d5f028e0853609a61ee59199784d6abad547191c Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Mon, 14 May 2018 20:12:52 +0200
Subject: [PATCH 02/74] Auto-expand replicas only after failing nodes (#30553)

#30423 combined auto-expansion in the same cluster state update where nodes
are removed. As the auto-expansion step would run before disassociating the
dead nodes from the routing table, the auto-expansion could remove replicas
from live nodes instead of dead ones. This commit reverses the order to
ensure that when nodes leave the cluster, the auto-expand-replica
functionality only triggers after failing the shards on the removed nodes.
This ensures that active shards on other live nodes are not failed if the
primary resided on a now-dead node. Instead, one of the replicas on the live
nodes first gets promoted to primary, and the auto-expansion (removing
replicas) only triggers in a follow-up step (but still within the same
cluster state update).
Relates to #30456 and follow-up of #30423 --- .../routing/allocation/AllocationService.java | 42 +++--- .../discovery/zen/NodeJoinController.java | 4 +- .../metadata/AutoExpandReplicasTests.java | 128 ++++++++++++++++++ .../indices/cluster/ClusterStateChanges.java | 10 ++ 4 files changed, 164 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index deb10b83b5a5d..569ddd6cee772 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -114,11 +114,24 @@ public ClusterState applyStartedShards(ClusterState clusterState, List roles = new HashSet<>(randomSubsetOf(Sets.newHashSet(DiscoveryNode.Role.values()))); + for (DiscoveryNode.Role mustHaveRole : mustHaveRoles) { + roles.add(mustHaveRole); + } + final String id = String.format(Locale.ROOT, "node_%03d", nodeIdGenerator.incrementAndGet()); + return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles, + Version.CURRENT); + } + + /** + * Checks that when nodes leave the cluster that the auto-expand-replica functionality only triggers after failing the shards on + * the removed nodes. This ensures that active shards on other live nodes are not failed if the primary resided on a now dead node. + * Instead, one of the replicas on the live nodes first gets promoted to primary, and the auto-expansion (removing replicas) only + * triggers in a follow-up step. + */ + public void testAutoExpandWhenNodeLeavesAndPossiblyRejoins() throws InterruptedException { + final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + final ClusterStateChanges cluster = new ClusterStateChanges(xContentRegistry(), threadPool); + + try { + List allNodes = new ArrayList<>(); + DiscoveryNode localNode = createNode(DiscoveryNode.Role.MASTER); // local node is the master + allNodes.add(localNode); + int numDataNodes = randomIntBetween(3, 5); + List dataNodes = new ArrayList<>(numDataNodes); + for (int i = 0; i < numDataNodes; i++) { + dataNodes.add(createNode(DiscoveryNode.Role.DATA)); + } + allNodes.addAll(dataNodes); + ClusterState state = ClusterStateCreationUtils.state(localNode, localNode, allNodes.toArray(new DiscoveryNode[allNodes.size()])); + + CreateIndexRequest request = new CreateIndexRequest("index", + Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_AUTO_EXPAND_REPLICAS, "0-all").build()) + .waitForActiveShards(ActiveShardCount.NONE); + state = cluster.createIndex(state, request); + assertTrue(state.metaData().hasIndex("index")); + while (state.routingTable().index("index").shard(0).allShardsStarted() == false) { + logger.info(state); + state = cluster.applyStartedShards(state, + state.routingTable().index("index").shard(0).shardsWithState(ShardRoutingState.INITIALIZING)); + state = cluster.reroute(state, new ClusterRerouteRequest()); + } + + IndexShardRoutingTable preTable = state.routingTable().index("index").shard(0); + final Set unchangedNodeIds; + final IndexShardRoutingTable postTable; + + if (randomBoolean()) { + // simulate node removal + List nodesToRemove = randomSubsetOf(2, dataNodes); + unchangedNodeIds = dataNodes.stream().filter(n -> nodesToRemove.contains(n) == false) + .map(DiscoveryNode::getId).collect(Collectors.toSet()); + + state = cluster.removeNodes(state, 
nodesToRemove); + postTable = state.routingTable().index("index").shard(0); + + assertTrue("not all shards started in " + state.toString(), postTable.allShardsStarted()); + assertThat(postTable.toString(), postTable.getAllAllocationIds(), everyItem(isIn(preTable.getAllAllocationIds()))); + } else { + // fake an election where conflicting nodes are removed and readded + state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).masterNodeId(null).build()).build(); + + List conflictingNodes = randomSubsetOf(2, dataNodes); + unchangedNodeIds = dataNodes.stream().filter(n -> conflictingNodes.contains(n) == false) + .map(DiscoveryNode::getId).collect(Collectors.toSet()); + + List nodesToAdd = conflictingNodes.stream() + .map(n -> new DiscoveryNode(n.getName(), n.getId(), buildNewFakeTransportAddress(), n.getAttributes(), n.getRoles(), n.getVersion())) + .collect(Collectors.toList()); + + if (randomBoolean()) { + nodesToAdd.add(createNode(DiscoveryNode.Role.DATA)); + } + + state = cluster.joinNodesAndBecomeMaster(state, nodesToAdd); + postTable = state.routingTable().index("index").shard(0); + } + + Set unchangedAllocationIds = preTable.getShards().stream().filter(shr -> unchangedNodeIds.contains(shr.currentNodeId())) + .map(shr -> shr.allocationId().getId()).collect(Collectors.toSet()); + + assertThat(postTable.toString(), unchangedAllocationIds, everyItem(isIn(postTable.getAllAllocationIds()))); + + postTable.getShards().forEach( + shardRouting -> { + if (shardRouting.assignedToNode() && unchangedAllocationIds.contains(shardRouting.allocationId().getId())) { + assertTrue("Shard should be active: " + shardRouting, shardRouting.active()); + } + } + ); + } finally { + terminate(threadPool); + } + } } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 9e8638af2491e..8bfd08244e466 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -87,6 +87,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -232,6 +233,15 @@ public ClusterState addNodes(ClusterState clusterState, List node return runTasks(joinTaskExecutor, clusterState, nodes); } + public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List nodes) { + List joinNodes = new ArrayList<>(); + joinNodes.add(NodeJoinController.BECOME_MASTER_TASK); + joinNodes.add(NodeJoinController.FINISH_ELECTION_TASK); + joinNodes.addAll(nodes); + + return runTasks(joinTaskExecutor, clusterState, joinNodes); + } + public ClusterState removeNodes(ClusterState clusterState, List nodes) { return runTasks(nodeRemovalExecutor, clusterState, nodes.stream() .map(n -> new ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task(n, "dummy reason")).collect(Collectors.toList())); From df852fbdd9a28b3e58ff670f20a079acc52194ea Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 14 May 2018 20:23:43 +0200 Subject: [PATCH 03/74] Fix non existing javadocs link in RestClientTests --- .../src/test/java/org/elasticsearch/client/RestClientTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java 
b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
index 872b327954b02..ea124828e45eb 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java
@@ -96,7 +96,7 @@ public void onFailure(Exception exception) {
     }

     /**
-     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testSetParameters()}.
+     * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
      */
     @Deprecated
     public void testPerformOldStyleAsyncWithNullParams() throws Exception {

From b30f2913cf68d1f2b7bd9b7e5d626c073f545578 Mon Sep 17 00:00:00 2001
From: Igor Motov
Date: Mon, 14 May 2018 15:54:42 -0400
Subject: [PATCH 04/74] Docs: document precision limitations of
 geo_bounding_box (#30540)

The geo_bounding_box query might produce false positives alongside the
right and upper edges and false negatives alongside left and bottom edges.
This commit documents the behavior and defines the maximum error.

Closes #29196
---
 .../query-dsl/geo-bounding-box-query.asciidoc | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc
index a1b427acf2718..21a689703e01e 100644
--- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc
+++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc
@@ -329,3 +329,16 @@ and will not match any documents for this query. This can be useful when
 querying multiple indexes which might have different mappings. When set to
 `false` (the default value) the query will throw an exception if the field
 is not mapped.
+
+[float]
+==== Notes on Precision
+
+Geopoints have limited precision and are always rounded down during index time.
+During the query time, upper boundaries of the bounding boxes are rounded down,
+while lower boundaries are rounded up. As a result, the points along on the
+lower bounds (bottom and left edges of the bounding box) might not make it into
+the bounding box due to the rounding error. At the same time points alongside
+the upper bounds (top and right edges) might be selected by the query even if
+they are located slightly outside the edge. The rounding error should be less
+than 4.20e-8 degrees on the latitude and less than 8.39e-8 degrees on the
+longitude, which translates to less than 1cm error even at the equator.
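A quick back-of-the-envelope check of the 1cm figure quoted in the patch
above (an illustrative calculation, not part of the original commit; it
assumes one degree spans roughly 111 km at the equator):

    longitude: 8.39e-8 deg x 111,000 m/deg ~ 9.3 mm
    latitude:  4.20e-8 deg x 111,000 m/deg ~ 4.7 mm

Both worst-case rounding errors indeed stay below the documented 1cm bound.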
From fa45c6c9a63d25bc4abb89f49014a7774acffbf0 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 14 May 2018 13:07:27 -0700 Subject: [PATCH 05/74] [DOCS] Fix path info for various security files (#30502) --- x-pack/docs/en/commands/syskeygen.asciidoc | 2 +- x-pack/docs/en/security/auditing.asciidoc | 2 +- .../configuring-ldap-realm.asciidoc | 2 +- .../docs/en/security/authorization.asciidoc | 2 +- .../authorization/mapping-roles.asciidoc | 2 +- .../docs/en/security/reference/files.asciidoc | 6 +++--- .../configuring-tls-docker.asciidoc | 20 +++++++++---------- .../securing-communications/tls-http.asciidoc | 6 +++--- .../securing-communications/tls-ldap.asciidoc | 2 +- .../tls-transport.asciidoc | 6 +++--- .../en/setup/bootstrap-checks-xes.asciidoc | 2 +- .../en/watcher/trigger/schedule/cron.asciidoc | 2 +- 12 files changed, 27 insertions(+), 27 deletions(-) diff --git a/x-pack/docs/en/commands/syskeygen.asciidoc b/x-pack/docs/en/commands/syskeygen.asciidoc index 8683d801d58f1..f4a198ff4bf22 100644 --- a/x-pack/docs/en/commands/syskeygen.asciidoc +++ b/x-pack/docs/en/commands/syskeygen.asciidoc @@ -43,7 +43,7 @@ environment variable. === Examples The following command generates a `system_key` file in the -default `$ES_HOME/config/x-pack` directory: +default `$ES_HOME/config` directory: [source, sh] -------------------------------------------------- diff --git a/x-pack/docs/en/security/auditing.asciidoc b/x-pack/docs/en/security/auditing.asciidoc index 8bff8727f8358..6cd31d076f94f 100644 --- a/x-pack/docs/en/security/auditing.asciidoc +++ b/x-pack/docs/en/security/auditing.asciidoc @@ -330,7 +330,7 @@ audited in plain text when including the request body in audit events. [[logging-file]] You can also configure how the logfile is written in the `log4j2.properties` -file located in `CONFIG_DIR/x-pack`. By default, audit information is appended to the +file located in `CONFIG_DIR`. By default, audit information is appended to the `_access.log` file located in the standard Elasticsearch `logs` directory (typically located at `$ES_HOME/logs`). The file rolls over on a daily basis. diff --git a/x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc b/x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc index b43a0911e0467..6ea9b243aad4d 100644 --- a/x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc @@ -56,7 +56,7 @@ xpack: group_search: base_dn: "dc=example,dc=com" files: - role_mapping: "CONFIG_DIR/x-pack/role_mapping.yml" + role_mapping: "CONFIG_DIR/role_mapping.yml" unmapped_groups_as_roles: false ------------------------------------------------------------ diff --git a/x-pack/docs/en/security/authorization.asciidoc b/x-pack/docs/en/security/authorization.asciidoc index 4a3ffe399de1b..ed171415056da 100644 --- a/x-pack/docs/en/security/authorization.asciidoc +++ b/x-pack/docs/en/security/authorization.asciidoc @@ -295,7 +295,7 @@ see {ref}/security-api-roles.html[Role Management APIs]. === File-based Role Management Apart from the _Role Management APIs_, roles can also be defined in local -`roles.yml` file located in `CONFIG_DIR/x-pack`. This is a YAML file where each +`roles.yml` file located in `CONFIG_DIR`. This is a YAML file where each role definition is keyed by its name. 
[IMPORTANT] diff --git a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc index 590546e217c86..2c1f1998c6883 100644 --- a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc @@ -36,7 +36,7 @@ To use file based role-mappings, you must configure the mappings in a YAML file and copy it to each node in the cluster. Tools like Puppet or Chef can help with this. -By default, role mappings are stored in `ES_PATH_CONF/x-pack/role_mapping.yml`, +By default, role mappings are stored in `ES_PATH_CONF/role_mapping.yml`, where `ES_PATH_CONF` is `ES_HOME/config` (zip/tar installations) or `/etc/elasticsearch` (package installations). To specify a different location, you configure the `files.role_mapping` realm settings in `elasticsearch.yml`. diff --git a/x-pack/docs/en/security/reference/files.asciidoc b/x-pack/docs/en/security/reference/files.asciidoc index cec8f9d1a3bcc..dcf673d9a9f26 100644 --- a/x-pack/docs/en/security/reference/files.asciidoc +++ b/x-pack/docs/en/security/reference/files.asciidoc @@ -3,7 +3,7 @@ The {security} uses the following files: -* `CONFIG_DIR/x-pack/roles.yml` defines the roles in use on the cluster +* `CONFIG_DIR/roles.yml` defines the roles in use on the cluster (read more <>). * `CONFIG_DIR/elasticsearch-users` defines the users and their hashed passwords for @@ -12,12 +12,12 @@ The {security} uses the following files: * `CONFIG_DIR/elasticsearch-users_roles` defines the user roles assignment for the the <>. -* `CONFIG_DIR/x-pack/role_mapping.yml` defines the role assignments for a +* `CONFIG_DIR/role_mapping.yml` defines the role assignments for a Distinguished Name (DN) to a role. This allows for LDAP and Active Directory groups and users and PKI users to be mapped to roles (read more <>). -* `CONFIG_DIR/x-pack/log4j2.properties` contains audit information (read more +* `CONFIG_DIR/log4j2.properties` contains audit information (read more <>). [[security-files-location]] diff --git a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc index affac534b6f01..d93d4e523d9f2 100644 --- a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc +++ b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc @@ -41,7 +41,7 @@ instances: `.env`: [source,yaml] ---- -CERTS_DIR=/usr/share/elasticsearch/config/x-pack/certificates <1> +CERTS_DIR=/usr/share/elasticsearch/config/certificates <1> ELASTIC_PASSWORD=PleaseChangeMe <2> ---- <1> The path, inside the Docker image, where certificates are expected to be found. @@ -66,18 +66,18 @@ services: image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} command: > bash -c ' - if [[ ! -d config/x-pack/certificates/certs ]]; then - mkdir config/x-pack/certificates/certs; + if [[ ! -d config/certificates/certs ]]; then + mkdir config/certificates/certs; fi; if [[ ! 
-f /local/certs/bundle.zip ]]; then - bin/elasticsearch-certgen --silent --in config/x-pack/certificates/instances.yml --out config/x-pack/certificates/certs/bundle.zip; - unzip config/x-pack/certificates/certs/bundle.zip -d config/x-pack/certificates/certs; <1> + bin/elasticsearch-certgen --silent --in config/certificates/instances.yml --out config/certificates/certs/bundle.zip; + unzip config/certificates/certs/bundle.zip -d config/certificates/certs; <1> fi; - chgrp -R 0 config/x-pack/certificates/certs + chgrp -R 0 config/certificates/certs ' user: $\{UID:-1000\} working_dir: /usr/share/elasticsearch - volumes: ['.:/usr/share/elasticsearch/config/x-pack/certificates'] + volumes: ['.:/usr/share/elasticsearch/config/certificates'] ---- <1> The new node certificates and CA certificate+key are placed under the local directory `certs`. @@ -184,9 +184,9 @@ WARNING: Windows users not running PowerShell will need to remove `\` and join l ---- docker exec es01 /bin/bash -c "bin/elasticsearch-setup-passwords \ auto --batch \ --Expack.ssl.certificate=x-pack/certificates/es01/es01.crt \ --Expack.ssl.certificate_authorities=x-pack/certificates/ca/ca.crt \ --Expack.ssl.key=x-pack/certificates/es01/es01.key \ +-Expack.ssl.certificate=certificates/es01/es01.crt \ +-Expack.ssl.certificate_authorities=certificates/ca/ca.crt \ +-Expack.ssl.key=certificates/es01/es01.key \ --url https://localhost:9200" ---- -- diff --git a/x-pack/docs/en/security/securing-communications/tls-http.asciidoc b/x-pack/docs/en/security/securing-communications/tls-http.asciidoc index dae088667c6fc..eb8e985a65b59 100644 --- a/x-pack/docs/en/security/securing-communications/tls-http.asciidoc +++ b/x-pack/docs/en/security/securing-communications/tls-http.asciidoc @@ -40,9 +40,9 @@ This name should match the `keystore.path` value. [source, yaml] -------------------------------------------------- xpack.security.http.ssl.enabled: true -xpack.security.http.ssl.key: /home/es/config/x-pack/node01.key <1> -xpack.security.http.ssl.certificate: /home/es/config/x-pack/node01.crt <2> -xpack.security.http.ssl.certificate_authorities: [ "/home/es/config/x-pack/ca.crt" ] <3> +xpack.security.http.ssl.key: /home/es/config/node01.key <1> +xpack.security.http.ssl.certificate: /home/es/config/node01.crt <2> +xpack.security.http.ssl.certificate_authorities: [ "/home/es/config/ca.crt" ] <3> -------------------------------------------------- <1> The full path to the node key file. This must be a location within the {es} configuration directory. diff --git a/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc b/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc index f10ced77f718a..b7f0b7d300590 100644 --- a/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc +++ b/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc @@ -29,7 +29,7 @@ xpack: order: 0 url: "ldaps://ldap.example.com:636" ssl: - certificate_authorities: [ "CONFIG_DIR/x-pack/cacert.pem" ] + certificate_authorities: [ "CONFIG_DIR/cacert.pem" ] -------------------------------------------------- The CA certificate must be a PEM encoded. 
diff --git a/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc b/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc index 9bce211a1e278..2e20a20f907ef 100644 --- a/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc +++ b/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc @@ -43,9 +43,9 @@ This name should match the `keystore.path` value. -------------------------------------------------- xpack.security.transport.ssl.enabled: true xpack.security.transport.ssl.verification_mode: certificate <1> -xpack.security.transport.ssl.key: /home/es/config/x-pack/node01.key <2> -xpack.security.transport.ssl.certificate: /home/es/config/x-pack/node01.crt <3> -xpack.security.transport.ssl.certificate_authorities: [ "/home/es/config/x-pack/ca.crt" ] <4> +xpack.security.transport.ssl.key: /home/es/config/node01.key <2> +xpack.security.transport.ssl.certificate: /home/es/config/node01.crt <3> +xpack.security.transport.ssl.certificate_authorities: [ "/home/es/config/ca.crt" ] <4> -------------------------------------------------- <1> If you used the `--dns` or `--ip` options with the `elasticsearch-certutil cert` command and you want to enable strict hostname checking, set the verification mode to diff --git a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc index a9150ec056c1e..6ee9c29b44ff3 100644 --- a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc +++ b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc @@ -43,7 +43,7 @@ to each user. If you use files to manage the role mappings, you must configure a YAML file and copy it to each node in the cluster. By default, role mappings are stored in -`ES_PATH_CONF/x-pack/role_mapping.yml`. Alternatively, you can specify a +`ES_PATH_CONF/role_mapping.yml`. Alternatively, you can specify a different role mapping file for each type of realm and specify its location in the `elasticsearch.yml` file. For more information, see {xpack-ref}/mapping-roles.html#mapping-roles-file[Using Role Mapping Files]. diff --git a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc index c24668a688dca..57d330510971d 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc @@ -213,7 +213,7 @@ minute during the weekend: {xpack} ships with a `elasticsearch-croneval` command line tool that you can use to verify that your cron expressions are valid and produce the expected results. This tool is -provided in the `$ES_HOME/bin/x-pack` directory. +provided in the `$ES_HOME/bin` directory. To verify a cron expression, simply pass it in as a parameter to `elasticsearch-croneval`: From 792827061065e402c8f80ce849e6f613d7536d39 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 14 May 2018 13:13:26 -0700 Subject: [PATCH 06/74] [DOCS] Fix realm setting names (#30499) --- .../security/authorization/mapping-roles.asciidoc | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc index 2c1f1998c6883..fba87db978626 100644 --- a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc @@ -39,14 +39,11 @@ this. 
By default, role mappings are stored in `ES_PATH_CONF/role_mapping.yml`, where `ES_PATH_CONF` is `ES_HOME/config` (zip/tar installations) or `/etc/elasticsearch` (package installations). To specify a different location, -you configure the `files.role_mapping` realm settings in `elasticsearch.yml`. -This setting enables you to use a different set of mappings for each realm type: - -|===== -| `xpack.security.authc.ldap.files.role_mapping` | | | The location of the role mappings for LDAP realms. -| `xpack.security.authc.active_directory.files.role_mapping` | | | The location of the role mappings for Active Directory realms. -| `xpack.security.authc.pki.files.role_mapping` | | | The location of the role mappings for PKI realms. -|===== +you configure the `files.role_mapping` setting in the +{ref}/security-settings.html#ref-ad-settings[Active Directory], +{ref}/security-settings.html#ref-ldap-settings[LDAP], and +{ref}/security-settings.html#ref-pki-settings[PKI] realm settings in +`elasticsearch.yml`. Within the role mapping file, the security roles are keys and groups and users are values. The mappings can have a many-to-many relationship. When you map roles From 1b0e6ee89f852e4e99c6e81f55878f4d8bd4bbef Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 14 May 2018 13:32:09 -0700 Subject: [PATCH 07/74] Deprecate Empty Templates (#30194) Deprecate the use of empty templates. Bug fix allows empty templates/scripts to be loaded on start up for upgrades/restarts, but empty templates can no longer be created. --- .../elasticsearch/script/ScriptMetaData.java | 21 ++++++- .../script/StoredScriptSource.java | 62 ++++++++++++++++--- .../script/ScriptMetaDataTests.java | 41 ++++++++++++ .../script/StoredScriptSourceTests.java | 2 +- .../script/StoredScriptTests.java | 36 ++++++++++- 5 files changed, 148 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java index dca17ce486607..9505875ae1ebc 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -46,6 +48,11 @@ */ public final class ScriptMetaData implements MetaData.Custom, Writeable, ToXContentFragment { + /** + * Standard deprecation logger for used to deprecate allowance of empty templates. + */ + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptMetaData.class)); + /** * A builder used to modify the currently stored scripts data held within * the {@link ClusterState}. 
Scripts can be added or deleted, then built @@ -161,8 +168,8 @@ static ScriptMetaData deleteStoredScript(ScriptMetaData previous, String id) { * * {@code * { - * "" : "<{@link StoredScriptSource#fromXContent(XContentParser)}>", - * "" : "<{@link StoredScriptSource#fromXContent(XContentParser)}>", + * "" : "<{@link StoredScriptSource#fromXContent(XContentParser, boolean)}>", + * "" : "<{@link StoredScriptSource#fromXContent(XContentParser, boolean)}>", * ... * } * } @@ -209,6 +216,14 @@ public static ScriptMetaData fromXContent(XContentParser parser) throws IOExcept lang = id.substring(0, split); id = id.substring(split + 1); source = new StoredScriptSource(lang, parser.text(), Collections.emptyMap()); + + if (source.getSource().isEmpty()) { + if (source.getLang().equals(Script.DEFAULT_TEMPLATE_LANG)) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + } else { + DEPRECATION_LOGGER.deprecated("empty scripts should no longer be used"); + } + } } exists = scripts.get(id); @@ -231,7 +246,7 @@ public static ScriptMetaData fromXContent(XContentParser parser) throws IOExcept } exists = scripts.get(id); - source = StoredScriptSource.fromXContent(parser); + source = StoredScriptSource.fromXContent(parser, true); if (exists == null) { scripts.put(id, source); diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index 9c52ff943d2a1..da6dad1dff384 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -32,6 +32,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; @@ -57,6 +59,11 @@ */ public class StoredScriptSource extends AbstractDiffable implements Writeable, ToXContentObject { + /** + * Standard deprecation logger for used to deprecate allowance of empty templates. + */ + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(StoredScriptSource.class)); + /** * Standard {@link ParseField} for outer level of stored script source. */ @@ -109,7 +116,7 @@ private void setLang(String lang) { private void setSource(XContentParser parser) { try { if (parser.currentToken() == Token.START_OBJECT) { - //this is really for search templates, that need to be converted to json format + // this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); source = Strings.toString(builder.copyCurrentStructure(parser)); options.put(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); @@ -131,8 +138,12 @@ private void setOptions(Map options) { /** * Validates the parameters and creates an {@link StoredScriptSource}. + * + * @param ignoreEmpty Specify as {@code true} to ignoreEmpty the empty source check. + * This allow empty templates to be loaded for backwards compatibility. + * This allow empty templates to be loaded for backwards compatibility. 
*/ - private StoredScriptSource build() { + private StoredScriptSource build(boolean ignoreEmpty) { if (lang == null) { throw new IllegalArgumentException("must specify lang for stored script"); } else if (lang.isEmpty()) { @@ -140,9 +151,25 @@ private StoredScriptSource build() { } if (source == null) { - throw new IllegalArgumentException("must specify source for stored script"); + if (ignoreEmpty || Script.DEFAULT_TEMPLATE_LANG.equals(lang)) { + if (Script.DEFAULT_TEMPLATE_LANG.equals(lang)) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + } else { + DEPRECATION_LOGGER.deprecated("empty scripts should no longer be used"); + } + } else { + throw new IllegalArgumentException("must specify source for stored script"); + } } else if (source.isEmpty()) { - throw new IllegalArgumentException("source cannot be empty"); + if (ignoreEmpty || Script.DEFAULT_TEMPLATE_LANG.equals(lang)) { + if (Script.DEFAULT_TEMPLATE_LANG.equals(lang)) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + } else { + DEPRECATION_LOGGER.deprecated("empty scripts should no longer be used"); + } + } else { + throw new IllegalArgumentException("source cannot be empty"); + } } if (options.size() > 1 || options.size() == 1 && options.get(Script.CONTENT_TYPE_OPTION) == null) { @@ -257,6 +284,8 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon token = parser.nextToken(); if (token == Token.END_OBJECT) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap()); } @@ -271,7 +300,7 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon token = parser.nextToken(); if (token == Token.START_OBJECT) { - return PARSER.apply(parser, null).build(); + return PARSER.apply(parser, null).build(false); } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, ]"); } @@ -280,7 +309,13 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon token = parser.nextToken(); if (token == Token.VALUE_STRING) { - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, parser.text(), Collections.emptyMap()); + String source = parser.text(); + + if (source == null || source.isEmpty()) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + } + + return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap()); } } @@ -293,7 +328,13 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon builder.copyCurrentStructure(parser); } - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, Strings.toString(builder), Collections.emptyMap()); + String source = Strings.toString(builder); + + if (source == null || source.isEmpty()) { + DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); + } + + return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap()); } } } catch (IOException ioe) { @@ -320,9 +361,12 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon * * Note that the "source" parameter can also handle template parsing including from * a complex JSON object. + * + * @param ignoreEmpty Specify as {@code true} to ignoreEmpty the empty source check. + * This allows empty templates to be loaded for backwards compatibility. 
*/ - public static StoredScriptSource fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); + public static StoredScriptSource fromXContent(XContentParser parser, boolean ignoreEmpty) { + return PARSER.apply(parser, null).build(ignoreEmpty); } /** diff --git a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index d5769cd192b75..32d4d48a44810 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -130,6 +132,45 @@ public void testBuilder() { assertEquals("1 + 1", result.getStoredScript("_id").getSource()); } + public void testLoadEmptyScripts() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject().field("mustache#empty", "").endObject(); + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); + ScriptMetaData.fromXContent(parser); + assertWarnings("empty templates should no longer be used"); + + builder = XContentFactory.jsonBuilder(); + builder.startObject().field("lang#empty", "").endObject(); + parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); + ScriptMetaData.fromXContent(parser); + assertWarnings("empty scripts should no longer be used"); + + builder = XContentFactory.jsonBuilder(); + builder.startObject().startObject("script").field("lang", "lang").field("source", "").endObject().endObject(); + parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); + ScriptMetaData.fromXContent(parser); + assertWarnings("empty scripts should no longer be used"); + + builder = XContentFactory.jsonBuilder(); + builder.startObject().startObject("script").field("lang", "mustache").field("source", "").endObject().endObject(); + parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); + ScriptMetaData.fromXContent(parser); + assertWarnings("empty templates should no longer be used"); + } + + @Override + protected boolean enableWarningsCheck() { + return true; + } + private ScriptMetaData randomScriptMetaData(XContentType sourceContentType, int minNumberScripts) throws IOException { ScriptMetaData.Builder builder = new ScriptMetaData.Builder(null); int numScripts = scaledRandomIntBetween(minNumberScripts, 32); diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java index 168ec4fc553b9..8aa4ca57acfed 100644 --- 
a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java @@ -58,7 +58,7 @@ protected StoredScriptSource createTestInstance() { @Override protected StoredScriptSource doParseInstance(XContentParser parser) { - return StoredScriptSource.fromXContent(parser); + return StoredScriptSource.fromXContent(parser, false); } @Override diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java index 2bf0216c546ec..79e3195f3d923 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractSerializingTestCase; +import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -204,6 +205,39 @@ public void testSourceParsingErrors() throws Exception { } } + public void testEmptyTemplateDeprecations() throws IOException { + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + builder.startObject().endObject(); + + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); + StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap()); + + assertThat(parsed, equalTo(source)); + assertWarnings("empty templates should no longer be used"); + } + + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + builder.startObject().field("template", "").endObject(); + + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); + StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap()); + + assertThat(parsed, equalTo(source)); + assertWarnings("empty templates should no longer be used"); + } + + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + builder.startObject().field("script").startObject().field("lang", "mustache") + .field("source", "").endObject().endObject(); + + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); + StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap()); + + assertThat(parsed, equalTo(source)); + assertWarnings("empty templates should no longer be used"); + } + } + @Override protected StoredScriptSource createTestInstance() { return new StoredScriptSource( @@ -219,7 +253,7 @@ protected Writeable.Reader instanceReader() { @Override protected StoredScriptSource doParseInstance(XContentParser parser) { - return StoredScriptSource.fromXContent(parser); + return StoredScriptSource.fromXContent(parser, false); } @Override From 4e33443690efaa837b45b89e11459a99650962c4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 14 May 2018 16:41:25 -0400 Subject: [PATCH 08/74] Adjust versions for resize copy settings (#30578) Now that the change to deprecate copy settings and disallow it being explicitly set to false is backported, this commit adjusts the BWC versions in master. 
--- .../rest-api-spec/test/indices.shrink/10_basic.yml | 4 ++-- .../test/indices.shrink/20_source_mapping.yml | 4 ++-- .../test/indices.shrink/30_copy_settings.yml | 4 ++-- .../rest-api-spec/test/indices.split/10_basic.yml | 12 ++++++------ .../test/indices.split/20_source_mapping.yml | 4 ++-- .../test/indices.split/30_copy_settings.yml | 4 ++-- .../action/admin/indices/shrink/ResizeRequest.java | 5 +---- 7 files changed, 17 insertions(+), 20 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml index a88b37ead3154..f94cf286fd898 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml @@ -1,8 +1,8 @@ --- "Shrink index via API": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" # creates an index with one document solely allocated on the master node # and shrinks it into a new index with a single shard diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml index ee7b2215d2187..6f532ff81c688 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml @@ -1,8 +1,8 @@ --- "Shrink index ignores target template mapping": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml index 50438384b3ab0..53a12aad787f7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml @@ -1,8 +1,8 @@ --- "Copy settings during shrink index": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 635673c182f2f..4f645d3eb3e0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -33,8 +33,8 @@ setup: --- "Split index via API": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" # make it read-only @@ -110,8 +110,8 @@ setup: # when re-enabling uncomment the below skips version: "all" reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.99.99" - # reason: expects warnings that pre-7.0.0 will not send + # version: " - 6.3.99" + # reason: expects warnings that pre-6.4.0 will not send 
features: "warnings" - do: indices.create: @@ -213,8 +213,8 @@ setup: --- "Create illegal split indices": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" # try to do an illegal split with number_of_routing_shards set diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 433ac040dd1e4..4bac4bf5b0807 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ -4,8 +4,8 @@ # when re-enabling uncomment the below skips version: "all" reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.99.99" - # reason: expects warnings that pre-7.0.0 will not send + # version: " - 6.3.99" + # reason: expects warnings that pre-6.4.0 will not send features: "warnings" # create index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml index e0ace991f4f0d..9e64b2b8130ad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml @@ -1,8 +1,8 @@ --- "Copy settings during split index": - skip: - version: " - 6.99.99" - reason: expects warnings that pre-7.0.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" - do: diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index e510c0719df2d..ca046c48accff 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -101,8 +101,6 @@ public void readFrom(StreamInput in) throws IOException { } if (in.getVersion().before(Version.V_6_4_0)) { copySettings = null; - } else if (in.getVersion().onOrAfter(Version.V_6_4_0) && in.getVersion().before(Version.V_7_0_0_alpha1)){ - copySettings = in.readBoolean(); } else { copySettings = in.readOptionalBoolean(); } @@ -116,10 +114,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) { out.writeEnum(type); } + // noinspection StatementWithEmptyBody if (out.getVersion().before(Version.V_6_4_0)) { - } else if (out.getVersion().onOrAfter(Version.V_6_4_0) && out.getVersion().before(Version.V_7_0_0_alpha1)) { - out.writeBoolean(copySettings == null ? false : copySettings); } else { out.writeOptionalBoolean(copySettings); } From 56d32bc8b2c73417df0d3e41206ef128f09558e5 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Mon, 14 May 2018 16:43:29 -0400 Subject: [PATCH 09/74] SQL: Extract SQL request and response classes (#30457) Extracts SQL request and response classes. This is the first step towards creation of a small minimal dependencies jdbc driver. 
Relates #29856 --- .../xpack/sql/jdbc/jdbc/JdbcConnection.java | 6 +- .../xpack/sql/jdbc/jdbc/JdbcStatement.java | 6 +- .../xpack/sql/jdbc/jdbc/PreparedQuery.java | 6 +- .../sql/jdbc/net/client/JdbcHttpClient.java | 33 +-- .../sql/jdbc/jdbc/TypeConverterTests.java | 4 +- .../xpack/sql/cli/command/CliSession.java | 5 +- .../sql/cli/command/ServerInfoCliCommand.java | 4 +- .../cli/command/ServerQueryCliCommand.java | 8 +- .../xpack/sql/cli/CliSessionTests.java | 8 +- .../command/ServerInfoCliCommandTests.java | 6 +- .../command/ServerQueryCliCommandTests.java | 6 +- .../sql/plugin/AbstractSqlQueryRequest.java | 76 +++---- .../xpack/sql/plugin/AbstractSqlRequest.java | 20 +- .../xpack/sql/plugin/CliFormatter.java | 29 +-- .../xpack/sql/plugin/MetaColumnInfo.java | 191 ------------------ .../sql/plugin/SqlClearCursorAction.java | 1 - .../sql/plugin/SqlClearCursorRequest.java | 20 +- .../sql/plugin/SqlClearCursorResponse.java | 17 +- .../xpack/sql/plugin/SqlQueryAction.java | 1 - .../xpack/sql/plugin/SqlQueryRequest.java | 26 +-- .../sql/plugin/SqlQueryRequestBuilder.java | 13 +- .../xpack/sql/plugin/SqlQueryResponse.java | 86 +++----- .../xpack/sql/plugin/SqlTranslateRequest.java | 13 ++ .../plugin/SqlTranslateRequestBuilder.java | 14 +- .../xpack/sql/proto/AbstractSqlRequest.java | 42 ++++ .../sql/{plugin => proto}/ColumnInfo.java | 56 ++--- .../xpack/sql/proto/MainResponse.java | 107 ++++++++++ .../elasticsearch/xpack/sql/proto/Mode.java | 30 +++ .../xpack/sql/proto/Protocol.java | 31 +++ .../sql/proto/SqlClearCursorRequest.java | 48 +++++ .../sql/proto/SqlClearCursorResponse.java | 61 ++++++ .../xpack/sql/proto/SqlQueryRequest.java | 172 ++++++++++++++++ .../xpack/sql/proto/SqlQueryResponse.java | 122 +++++++++++ .../{plugin => proto}/SqlTypedParamValue.java | 26 +-- .../plugin/SqlClearCursorRequestTests.java | 7 +- .../plugin/SqlClearCursorResponseTests.java | 4 +- .../sql/plugin/SqlQueryRequestTests.java | 18 +- .../sql/plugin/SqlQueryResponseTests.java | 5 +- .../sql/plugin/SqlTranslateRequestTests.java | 7 +- .../xpack/sql/client/HttpClient.java | 37 ++-- .../xpack/sql/execution/PlanExecutor.java | 2 +- .../xpack/sql/parser/AstBuilder.java | 2 +- .../xpack/sql/parser/CommandBuilder.java | 4 +- .../xpack/sql/parser/ExpressionBuilder.java | 4 +- .../xpack/sql/parser/LogicalPlanBuilder.java | 2 +- .../xpack/sql/parser/SqlParser.java | 2 +- .../sql/plugin/RestSqlClearCursorAction.java | 10 +- .../xpack/sql/plugin/RestSqlQueryAction.java | 8 +- .../sql/plugin/RestSqlTranslateAction.java | 3 +- .../xpack/sql/plugin/SqlLicenseChecker.java | 8 +- .../xpack/sql/plugin/TextFormat.java | 9 +- .../sql/plugin/TransportSqlQueryAction.java | 3 +- .../xpack/sql/session/Configuration.java | 8 +- .../xpack/sql/session/SqlSession.java | 4 +- .../xpack/sql/action/SqlActionIT.java | 4 +- .../sql/execution/search/CursorTests.java | 5 +- .../xpack/sql/expression/ParameterTests.java | 18 +- .../sql/parser/LikeEscapingParsingTests.java | 8 +- .../logical/command/sys/SysTablesTests.java | 4 +- .../xpack/sql/plugin/CliFormatterTests.java | 17 +- .../xpack/sql/plugin/TextFormatTests.java | 1 + 61 files changed, 909 insertions(+), 589 deletions(-) delete mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/MetaColumnInfo.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java rename x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/{plugin => proto}/ColumnInfo.java (70%) create mode 100644 
x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java create mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java rename x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/{plugin => proto}/SqlTypedParamValue.java (76%) diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConnection.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConnection.java index 17f8973cea386..0eb1888487cf1 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConnection.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConnection.java @@ -43,6 +43,10 @@ public class JdbcConnection implements Connection, JdbcWrapper { private String catalog; private String schema; + /** + * The SQLException is the only type of Exception the JDBC API can throw (and that the user expects). + * If we remove it, we need to make sure no other types of Exceptions (runtime or otherwise) are thrown + */ public JdbcConnection(JdbcConfiguration connectionInfo) throws SQLException { cfg = connectionInfo; client = new JdbcHttpClient(connectionInfo); @@ -428,4 +432,4 @@ int esInfoMajorVersion() throws SQLException { int esInfoMinorVersion() throws SQLException { return client.serverInfo().minorVersion; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcStatement.java index fab21c541799e..c773dd5d17dc1 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcStatement.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor; import org.elasticsearch.xpack.sql.jdbc.net.client.RequestMeta; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.sql.Connection; import java.sql.ResultSet; @@ -220,7 +220,7 @@ public int getFetchSize() throws SQLException { // unset (in this case -1 which the user cannot set) - in this case, the default fetch size is returned // 0 meaning the hint is disabled (the user has called setFetch) // >0 means actual hint - + // tl;dr - unless the user set it, returning the default is fine return requestMeta.fetchSize(); } @@ -402,4 +402,4 @@ final void resultSetWasClosed() throws SQLException { close(); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java index 4aaf337f2b772..06825ee6e3f96 100644 --- 
a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.jdbc.jdbc; import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; import java.sql.JDBCType; @@ -73,7 +73,7 @@ String sql() { */ List params() { return Arrays.stream(this.params).map( - paramInfo -> new SqlTypedParamValue(paramInfo.value, DataType.fromJdbcType(paramInfo.type)) + paramInfo -> new SqlTypedParamValue(DataType.fromJdbcType(paramInfo.type), paramInfo.value) ).collect(Collectors.toList()); } @@ -86,4 +86,4 @@ public String toString() { static PreparedQuery prepare(String sql) throws SQLException { return new PreparedQuery(sql, SqlQueryParameterAnalyzer.parametersCount(sql)); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java index ab4cdff985863..89ee78e0bae9e 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java @@ -5,22 +5,21 @@ */ package org.elasticsearch.xpack.sql.jdbc.net.client; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.sql.client.HttpClient; import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration; import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo; import org.elasticsearch.xpack.sql.jdbc.net.protocol.InfoResponse; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest; -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequest; -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.proto.SqlQueryRequest; +import org.elasticsearch.xpack.sql.proto.MainResponse; +import org.elasticsearch.xpack.sql.proto.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.sql.SQLException; import java.util.List; -import java.util.TimeZone; import java.util.stream.Collectors; import static org.elasticsearch.xpack.sql.client.shared.StringUtils.EMPTY; @@ -34,6 +33,10 @@ public class JdbcHttpClient { private final JdbcConfiguration conCfg; private InfoResponse serverInfo; + /** + * The SQLException is the only type of Exception the JDBC API can throw (and that the user expects). + * If we remove it, we need to make sure no other types of Exceptions (runtime or otherwise) are thrown + */ public JdbcHttpClient(JdbcConfiguration conCfg) throws SQLException { httpClient = new HttpClient(conCfg); this.conCfg = conCfg; @@ -45,9 +48,9 @@ public boolean ping(long timeoutInMs) throws SQLException { public Cursor query(String sql, List params, RequestMeta meta) throws SQLException { int fetch = meta.fetchSize() > 0 ? 
meta.fetchSize() : conCfg.pageSize(); - SqlQueryRequest sqlRequest = new SqlQueryRequest(AbstractSqlRequest.Mode.JDBC, sql, params, null, - AbstractSqlQueryRequest.DEFAULT_TIME_ZONE, - fetch, TimeValue.timeValueMillis(meta.timeoutInMs()), TimeValue.timeValueMillis(meta.queryTimeoutInMs()), ""); + SqlQueryRequest sqlRequest = new SqlQueryRequest(Mode.JDBC, sql, params, null, + Protocol.TIME_ZONE, + fetch, TimeValue.timeValueMillis(meta.timeoutInMs()), TimeValue.timeValueMillis(meta.queryTimeoutInMs())); SqlQueryResponse response = httpClient.query(sqlRequest); return new DefaultCursor(this, response.cursor(), toJdbcColumnInfo(response.columns()), response.rows(), meta); } @@ -57,10 +60,8 @@ public Cursor query(String sql, List params, RequestMeta met * the scroll id to use to fetch the next page. */ public Tuple>> nextPage(String cursor, RequestMeta meta) throws SQLException { - SqlQueryRequest sqlRequest = new SqlQueryRequest().cursor(cursor); - sqlRequest.mode(AbstractSqlRequest.Mode.JDBC); - sqlRequest.requestTimeout(TimeValue.timeValueMillis(meta.timeoutInMs())); - sqlRequest.pageTimeout(TimeValue.timeValueMillis(meta.queryTimeoutInMs())); + SqlQueryRequest sqlRequest = new SqlQueryRequest(Mode.JDBC, cursor, TimeValue.timeValueMillis(meta.timeoutInMs()), + TimeValue.timeValueMillis(meta.queryTimeoutInMs())); SqlQueryResponse response = httpClient.query(sqlRequest); return new Tuple<>(response.cursor(), response.rows()); } @@ -78,13 +79,13 @@ public InfoResponse serverInfo() throws SQLException { private InfoResponse fetchServerInfo() throws SQLException { MainResponse mainResponse = httpClient.serverInfo(); - return new InfoResponse(mainResponse.getClusterName().value(), mainResponse.getVersion().major, mainResponse.getVersion().minor); + return new InfoResponse(mainResponse.getClusterName(), mainResponse.getVersion().major, mainResponse.getVersion().minor); } /** * Converts REST column metadata into JDBC column metadata */ - private List toJdbcColumnInfo(List columns) { + private List toJdbcColumnInfo(List columns) { return columns.stream().map(columnInfo -> new ColumnInfo(columnInfo.name(), columnInfo.jdbcType(), EMPTY, EMPTY, EMPTY, EMPTY, columnInfo.displaySize()) ).collect(Collectors.toList()); diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java index 612c46fbe56ef..dc4ba9fa244b5 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest; import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.Mode; import org.joda.time.DateTime; import java.sql.JDBCType; @@ -51,7 +51,7 @@ private Object convertAsNative(Object value, JDBCType type) throws Exception { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); builder.field("value"); - SqlQueryResponse.value(builder, AbstractSqlRequest.Mode.JDBC, value); + SqlQueryResponse.value(builder, Mode.JDBC, value); builder.endObject(); builder.close(); Object copy = XContentHelper.convertToMap(BytesReference.bytes(builder), 
false, builder.contentType()).v2().get("value"); diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java index 64f38c2254c5f..8e030f36dd042 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.sql.cli.command; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.xpack.sql.client.HttpClient; import org.elasticsearch.xpack.sql.client.shared.ClientException; import org.elasticsearch.xpack.sql.client.shared.Version; import org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest; +import org.elasticsearch.xpack.sql.proto.MainResponse; +import org.elasticsearch.xpack.sql.proto.Protocol; import java.sql.SQLException; @@ -18,7 +19,7 @@ */ public class CliSession { private final HttpClient httpClient; - private int fetchSize = AbstractSqlQueryRequest.DEFAULT_FETCH_SIZE; + private int fetchSize = Protocol.FETCH_SIZE; private String fetchSeparator = ""; private boolean debug; diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommand.java index 635c041da7ae6..e637386f9798f 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommand.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.sql.cli.command; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.xpack.sql.cli.CliTerminal; +import org.elasticsearch.xpack.sql.proto.MainResponse; import java.sql.SQLException; import java.util.Locale; @@ -30,7 +30,7 @@ public boolean doHandle(CliTerminal terminal, CliSession cliSession, String line } terminal.line() .text("Node:").em(info.getNodeName()) - .text(" Cluster:").em(info.getClusterName().value()) + .text(" Cluster:").em(info.getClusterName()) .text(" Version:").em(info.getVersion().toString()) .ln(); return true; diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java index c1fc609c50b8f..aa8bc499cd29e 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.client.HttpClient; import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection; import org.elasticsearch.xpack.sql.plugin.CliFormatter; -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.SqlQueryResponse; import java.sql.SQLException; @@ -23,8 +23,8 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l String data; try { response = cliClient.queryInit(line, cliSession.getFetchSize()); - cliFormatter = new CliFormatter(response); - data = cliFormatter.formatWithHeader(response); + cliFormatter = new CliFormatter(response.columns(), 
response.rows()); + data = cliFormatter.formatWithHeader(response.columns(), response.rows()); while (true) { handleText(terminal, data); if (response.cursor().isEmpty()) { @@ -36,7 +36,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l terminal.println(cliSession.getFetchSeparator()); } response = cliSession.getClient().nextPage(response.cursor()); - data = cliFormatter.formatWithoutHeader(response); + data = cliFormatter.formatWithoutHeader(response.rows()); } } catch (SQLException e) { if (JreHttpUrlConnection.SQL_STATE_BAD_SERVER.equals(e.getSQLState())) { diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java index befcddf9e7d25..e5643ad443a59 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.sql.cli; import org.elasticsearch.Build; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.UUIDs; import org.elasticsearch.test.ESTestCase; @@ -14,6 +13,7 @@ import org.elasticsearch.xpack.sql.client.HttpClient; import org.elasticsearch.xpack.sql.client.shared.ClientException; import org.elasticsearch.xpack.sql.client.shared.Version; +import org.elasticsearch.xpack.sql.proto.MainResponse; import java.sql.SQLException; @@ -28,7 +28,7 @@ public class CliSessionTests extends ESTestCase { public void testProperConnection() throws Exception { HttpClient httpClient = mock(HttpClient.class); when(httpClient.serverInfo()).thenReturn(new MainResponse(randomAlphaOfLength(5), org.elasticsearch.Version.CURRENT, - ClusterName.DEFAULT, UUIDs.randomBase64UUID(), Build.CURRENT)); + ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID(), Build.CURRENT)); CliSession cliSession = new CliSession(httpClient); cliSession.checkConnection(); verify(httpClient, times(1)).serverInfo(); @@ -58,10 +58,10 @@ public void testWrongServerVersion() throws Exception { } when(httpClient.serverInfo()).thenReturn(new MainResponse(randomAlphaOfLength(5), org.elasticsearch.Version.fromString(major + "." 
+ minor + ".23"), - ClusterName.DEFAULT, UUIDs.randomBase64UUID(), Build.CURRENT)); + ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID(), Build.CURRENT)); CliSession cliSession = new CliSession(httpClient); expectThrows(ClientException.class, cliSession::checkConnection); verify(httpClient, times(1)).serverInfo(); verifyNoMoreInteractions(httpClient); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommandTests.java index 567cd10531d71..e99cb2fb7f7e2 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerInfoCliCommandTests.java @@ -6,12 +6,12 @@ package org.elasticsearch.xpack.sql.cli.command; import org.elasticsearch.Build; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.UUIDs; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.cli.TestTerminal; import org.elasticsearch.xpack.sql.client.HttpClient; +import org.elasticsearch.xpack.sql.proto.MainResponse; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -36,7 +36,7 @@ public void testShowInfo() throws Exception { HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); when(client.serverInfo()).thenReturn(new MainResponse("my_node", org.elasticsearch.Version.fromString("1.2.3"), - new ClusterName("my_cluster"), UUIDs.randomBase64UUID(), Build.CURRENT)); + new ClusterName("my_cluster").value(), UUIDs.randomBase64UUID(), Build.CURRENT)); ServerInfoCliCommand cliCommand = new ServerInfoCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "info")); assertEquals(testTerminal.toString(), "Node:my_node Cluster:my_cluster Version:1.2.3\n"); @@ -44,4 +44,4 @@ public void testShowInfo() throws Exception { verifyNoMoreInteractions(client); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java index 4385731313aaf..86ebfa52fe49f 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.cli.TestTerminal; import org.elasticsearch.xpack.sql.client.HttpClient; -import org.elasticsearch.xpack.sql.plugin.ColumnInfo; -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.SqlQueryResponse; import java.sql.JDBCType; import java.sql.SQLException; @@ -119,4 +119,4 @@ private SqlQueryResponse fakeResponse(String cursor, boolean includeColumns, Str } return new SqlQueryResponse(cursor, columns, rows); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java 
b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java index 8969b88161935..8d34d59c1e0af 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java @@ -10,12 +10,17 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.type.DataType; import java.io.IOException; import java.util.Collections; @@ -28,20 +33,12 @@ * Base class for requests that contain sql queries (Query and Translate) */ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest implements CompositeIndicesRequest, ToXContentFragment { - public static final TimeZone DEFAULT_TIME_ZONE = TimeZone.getTimeZone("UTC"); - - /** - * Global choice for the default fetch size. - */ - public static final int DEFAULT_FETCH_SIZE = 1000; - public static final TimeValue DEFAULT_REQUEST_TIMEOUT = TimeValue.timeValueSeconds(90); - public static final TimeValue DEFAULT_PAGE_TIMEOUT = TimeValue.timeValueSeconds(45); private String query = ""; - private TimeZone timeZone = DEFAULT_TIME_ZONE; - private int fetchSize = DEFAULT_FETCH_SIZE; - private TimeValue requestTimeout = DEFAULT_REQUEST_TIMEOUT; - private TimeValue pageTimeout = DEFAULT_PAGE_TIMEOUT; + private TimeZone timeZone = Protocol.TIME_ZONE; + private int fetchSize = Protocol.FETCH_SIZE; + private TimeValue requestTimeout = Protocol.REQUEST_TIMEOUT; + private TimeValue pageTimeout = Protocol.PAGE_TIMEOUT; @Nullable private QueryBuilder filter = null; private List params = Collections.emptyList(); @@ -69,11 +66,10 @@ protected static ObjectParser objec parser.declareObjectArray(AbstractSqlQueryRequest::params, (p, c) -> SqlTypedParamValue.fromXContent(p), new ParseField("params")); parser.declareString((request, zoneId) -> request.timeZone(TimeZone.getTimeZone(zoneId)), new ParseField("time_zone")); parser.declareInt(AbstractSqlQueryRequest::fetchSize, new ParseField("fetch_size")); + parser.declareString((request, timeout) -> request.requestTimeout(TimeValue.parseTimeValue(timeout, Protocol.REQUEST_TIMEOUT, + "request_timeout")), new ParseField("request_timeout")); parser.declareString( - (request, timeout) -> request.requestTimeout(TimeValue.parseTimeValue(timeout, DEFAULT_REQUEST_TIMEOUT, "request_timeout")), - new ParseField("request_timeout")); - parser.declareString( - (request, timeout) -> request.pageTimeout(TimeValue.parseTimeValue(timeout, DEFAULT_PAGE_TIMEOUT, "page_timeout")), + (request, timeout) -> request.pageTimeout(TimeValue.parseTimeValue(timeout, Protocol.PAGE_TIMEOUT, "page_timeout")), new ParseField("page_timeout")); parser.declareObject(AbstractSqlQueryRequest::filter, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), new ParseField("filter")); @@ 
-185,7 +181,7 @@ public QueryBuilder filter() { public AbstractSqlQueryRequest(StreamInput in) throws IOException { super(in); query = in.readString(); - params = in.readList(SqlTypedParamValue::new); + params = in.readList(AbstractSqlQueryRequest::readSqlTypedParamValue); timeZone = TimeZone.getTimeZone(in.readString()); fetchSize = in.readVInt(); requestTimeout = in.readTimeValue(); @@ -193,11 +189,23 @@ public AbstractSqlQueryRequest(StreamInput in) throws IOException { filter = in.readOptionalNamedWriteable(QueryBuilder.class); } + public static void writeSqlTypedParamValue(StreamOutput out, SqlTypedParamValue value) throws IOException { + out.writeEnum(value.dataType); + out.writeGenericValue(value.value); + } + + public static SqlTypedParamValue readSqlTypedParamValue(StreamInput in) throws IOException { + return new SqlTypedParamValue(in.readEnum(DataType.class), in.readGenericValue()); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(query); - out.writeList(params); + out.writeVInt(params.size()); + for (SqlTypedParamValue param: params) { + writeSqlTypedParamValue(out, param); + } out.writeString(timeZone.getID()); out.writeVInt(fetchSize); out.writeTimeValue(requestTimeout); @@ -224,36 +232,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter); } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (query != null) { - builder.field("query", query); - } - if (this.params.isEmpty() == false) { - builder.startArray("params"); - for (SqlTypedParamValue val : this.params) { - val.toXContent(builder, params); - } - builder.endArray(); - } - if (timeZone != null) { - builder.field("time_zone", timeZone.getID()); - } - if (fetchSize != DEFAULT_FETCH_SIZE) { - builder.field("fetch_size", fetchSize); - } - if (requestTimeout != DEFAULT_REQUEST_TIMEOUT) { - builder.field("request_timeout", requestTimeout.getStringRep()); - } - if (pageTimeout != DEFAULT_PAGE_TIMEOUT) { - builder.field("page_timeout", pageTimeout.getStringRep()); - } - if (filter != null) { - builder.field("filter"); - filter.toXContent(builder, params); - } - return builder; - } - } diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java index bc4b1e81e44b3..2cb23f796d609 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java @@ -10,9 +10,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.xpack.sql.proto.Mode; import java.io.IOException; -import java.util.Locale; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -24,24 +24,6 @@ */ public abstract class AbstractSqlRequest extends ActionRequest implements ToXContent { - public enum Mode { - PLAIN, - JDBC; - - public static Mode fromString(String mode) { - if (mode == null) { - return PLAIN; - } - return Mode.valueOf(mode.toUpperCase(Locale.ROOT)); - } - - - @Override - public String toString() { - return 
this.name().toLowerCase(Locale.ROOT); - } - } - private Mode mode = Mode.PLAIN; protected AbstractSqlRequest() { diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java index 9d9a9ea04a487..359652fa4f203 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import java.io.IOException; import java.util.Arrays; @@ -28,19 +29,19 @@ public class CliFormatter implements Writeable { /** * Create a new {@linkplain CliFormatter} for formatting responses similar - * to the provided {@link SqlQueryResponse}. + * to the provided columns and rows. */ - public CliFormatter(SqlQueryResponse response) { + public CliFormatter(List columns, List> rows) { // Figure out the column widths: // 1. Start with the widths of the column names - width = new int[response.columns().size()]; + width = new int[columns.size()]; for (int i = 0; i < width.length; i++) { // TODO read the width from the data type? - width[i] = Math.max(MIN_COLUMN_WIDTH, response.columns().get(i).name().length()); + width[i] = Math.max(MIN_COLUMN_WIDTH, columns.get(i).name().length()); } // 2. Expand columns to fit the largest value - for (List row : response.rows()) { + for (List row : rows) { for (int i = 0; i < width.length; i++) { // TODO are we sure toString is correct here? What about dates that come back as longs. // Tracked by https://github.com/elastic/x-pack-elasticsearch/issues/3081 @@ -62,15 +63,15 @@ public void writeTo(StreamOutput out) throws IOException { * Format the provided {@linkplain SqlQueryResponse} for the CLI * including the header lines. */ - public String formatWithHeader(SqlQueryResponse response) { + public String formatWithHeader(List columns, List> rows) { // The header lines - StringBuilder sb = new StringBuilder(estimateSize(response.rows().size() + 2)); + StringBuilder sb = new StringBuilder(estimateSize(rows.size() + 2)); for (int i = 0; i < width.length; i++) { if (i > 0) { sb.append('|'); } - String name = response.columns().get(i).name(); + String name = columns.get(i).name(); // left padding int leftPadding = (width[i] - name.length()) / 2; for (int j = 0; j < leftPadding; j++) { @@ -98,19 +99,19 @@ public String formatWithHeader(SqlQueryResponse response) { /* Now format the results. Sadly, this means that column * widths are entirely determined by the first batch of * results. */ - return formatWithoutHeader(sb, response); + return formatWithoutHeader(sb, rows); } /** * Format the provided {@linkplain SqlQueryResponse} for the CLI * without the header lines. 
*/ - public String formatWithoutHeader(SqlQueryResponse response) { - return formatWithoutHeader(new StringBuilder(estimateSize(response.rows().size())), response); + public String formatWithoutHeader(List> rows) { + return formatWithoutHeader(new StringBuilder(estimateSize(rows.size())), rows); } - private String formatWithoutHeader(StringBuilder sb, SqlQueryResponse response) { - for (List row : response.rows()) { + private String formatWithoutHeader(StringBuilder sb, List> rows) { + for (List row : rows) { for (int i = 0; i < width.length; i++) { if (i > 0) { sb.append('|'); @@ -138,7 +139,7 @@ private String formatWithoutHeader(StringBuilder sb, SqlQueryResponse response) } /** - * Pick a good estimate of the buffer size needed to contain the rows. + * Pick a good estimate of the buffer size needed to contain the rows. */ int estimateSize(int rows) { /* Each column has either a '|' or a '\n' after it diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/MetaColumnInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/MetaColumnInfo.java deleted file mode 100644 index 72d5932f51137..0000000000000 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/MetaColumnInfo.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.plugin; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.sql.JDBCType; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Information about a column returned by the listColumns response - */ -public class MetaColumnInfo implements Writeable, ToXContentObject { - - private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>("column_info", true, objects -> - new MetaColumnInfo( - (String) objects[0], - (String) objects[1], - (String) objects[2], - objects[3] == null ? null : JDBCType.valueOf((int) objects[3]), - objects[4] == null ? 
0 : (int) objects[4], - (int) objects[5])); - - private static final ParseField TABLE = new ParseField("table"); - private static final ParseField NAME = new ParseField("name"); - private static final ParseField ES_TYPE = new ParseField("type"); - private static final ParseField JDBC_TYPE = new ParseField("jdbc_type"); - private static final ParseField SIZE = new ParseField("size"); - private static final ParseField POSITION = new ParseField("position"); - - static { - PARSER.declareString(constructorArg(), TABLE); - PARSER.declareString(constructorArg(), NAME); - PARSER.declareString(constructorArg(), ES_TYPE); - PARSER.declareInt(optionalConstructorArg(), JDBC_TYPE); - PARSER.declareInt(optionalConstructorArg(), SIZE); - PARSER.declareInt(constructorArg(), POSITION); - } - - private final String table; - private final String name; - private final String esType; - @Nullable - private final JDBCType jdbcType; - private final int size; - private final int position; - - public MetaColumnInfo(String table, String name, String esType, JDBCType jdbcType, int size, int position) { - this.table = table; - this.name = name; - this.esType = esType; - this.jdbcType = jdbcType; - this.size = size; - this.position = position; - } - - public MetaColumnInfo(String table, String name, String esType, int position) { - this(table, name, esType, null, 0, position); - } - - MetaColumnInfo(StreamInput in) throws IOException { - table = in.readString(); - name = in.readString(); - esType = in.readString(); - if (in.readBoolean()) { - jdbcType = JDBCType.valueOf(in.readVInt()); - size = in.readVInt(); - } else { - jdbcType = null; - size = 0; - } - position = in.readVInt(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(table); - out.writeString(name); - out.writeString(esType); - if (jdbcType != null) { - out.writeBoolean(true); - out.writeVInt(jdbcType.getVendorTypeNumber()); - out.writeVInt(size); - } else { - out.writeBoolean(false); - } - out.writeVInt(position); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("table", table); - builder.field("name", name); - builder.field("type", esType); - if (jdbcType != null) { - builder.field("jdbc_type", jdbcType.getVendorTypeNumber()); - builder.field("size", size); - } - builder.field("position", position); - return builder.endObject(); - } - - - public static MetaColumnInfo fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * Name of the table. - */ - public String table() { - return table; - } - - /** - * Name of the column. - */ - public String name() { - return name; - } - - /** - * The type of the column in Elasticsearch. - */ - public String esType() { - return esType; - } - - /** - * The type of the column as it would be returned by a JDBC driver. 
- */ - public JDBCType jdbcType() { - return jdbcType; - } - - /** - * Precision - */ - public int size() { - return size; - } - - /** - * Column position with in the tables - */ - public int position() { - return position; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - MetaColumnInfo that = (MetaColumnInfo) o; - return size == that.size && - position == that.position && - Objects.equals(table, that.table) && - Objects.equals(name, that.name) && - Objects.equals(esType, that.esType) && - jdbcType == that.jdbcType; - } - - @Override - public int hashCode() { - return Objects.hash(table, name, esType, jdbcType, size, position); - } - -} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java index ed64fa2a41e57..f0b91640f981f 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java @@ -13,7 +13,6 @@ public class SqlClearCursorAction public static final SqlClearCursorAction INSTANCE = new SqlClearCursorAction(); public static final String NAME = "indices:data/read/sql/close_cursor"; - public static final String REST_ENDPOINT = "/_xpack/sql/close"; private SqlClearCursorAction() { super(NAME); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java index 0dfb9f71e38f1..45dda28588726 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java @@ -10,9 +10,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.sql.proto.Mode; import java.io.IOException; import java.util.Objects; @@ -23,13 +23,13 @@ /** * Request to clean all SQL resources associated with the cursor */ -public class SqlClearCursorRequest extends AbstractSqlRequest implements ToXContentObject { +public class SqlClearCursorRequest extends AbstractSqlRequest { private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(SqlClearCursorAction.NAME, true, (objects, mode) -> new SqlClearCursorRequest( - mode, - (String) objects[0] - )); + new ConstructingObjectParser<>(SqlClearCursorAction.NAME, true, (objects, mode) -> new SqlClearCursorRequest( + mode, + (String) objects[0] + )); static { PARSER.declareString(constructorArg(), new ParseField("cursor")); @@ -96,13 +96,11 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("cursor", cursor); - builder.endObject(); - return builder; + // This is needed just to test round-trip 
compatibility with proto.SqlClearCursorRequest + return new org.elasticsearch.xpack.sql.proto.SqlClearCursorRequest(mode(), cursor).toXContent(builder, params); } public static SqlClearCursorRequest fromXContent(XContentParser parser, Mode mode) { return PARSER.apply(parser, mode); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java index b157d65dfff84..3bb3df9a47ffd 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java @@ -6,13 +6,10 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -20,20 +17,13 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.xpack.sql.proto.SqlClearCursorResponse.SUCCEEDED; /** * Response to the request to clean all SQL resources associated with the cursor */ public class SqlClearCursorResponse extends ActionResponse implements StatusToXContentObject { - private static final ParseField SUCCEEDED = new ParseField("succeeded"); - public static final ObjectParser PARSER = - new ObjectParser<>(SqlClearCursorAction.NAME, true, SqlClearCursorResponse::new); - static { - PARSER.declareBoolean(SqlClearCursorResponse::setSucceeded, SUCCEEDED); - } - - private boolean succeeded; public SqlClearCursorResponse(boolean succeeded) { @@ -93,9 +83,4 @@ public int hashCode() { return Objects.hash(succeeded); } - public static SqlClearCursorResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - } diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java index fd46799608c73..cbcf626adad55 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java @@ -12,7 +12,6 @@ public class SqlQueryAction extends Action PARSER = objectParser(SqlQueryRequest::new); public static final ParseField CURSOR = new ParseField("cursor"); @@ -37,7 +38,7 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest implements ToXConte static { PARSER.declareString(SqlQueryRequest::cursor, CURSOR); PARSER.declareObject(SqlQueryRequest::filter, - (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), FILTER); + (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), FILTER); } private String cursor = ""; @@ -108,24 +109,15 @@ public String getDescription() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - 
super.toXContent(builder, params); - if (cursor != null) { - builder.field("cursor", cursor); - } - builder.endObject(); - return builder; - } - - @Override - public boolean isFragment() { - return false; + // This is needed just to test round-trip compatibility with proto.SqlQueryRequest + return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(mode(), query(), params(), timeZone(), fetchSize(), + requestTimeout(), pageTimeout(), filter(), cursor()).toXContent(builder, params); } public static SqlQueryRequest fromXContent(XContentParser parser, Mode mode) { - SqlQueryRequest request = PARSER.apply(parser, null); + SqlQueryRequest request = PARSER.apply(parser, null); request.mode(mode); return request; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java index a08af6f8ce4b0..1eddd09d89d35 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java @@ -9,25 +9,22 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest.Mode; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.util.Collections; import java.util.List; import java.util.TimeZone; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_FETCH_SIZE; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_PAGE_TIMEOUT; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_REQUEST_TIMEOUT; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_TIME_ZONE; - /** * The builder to build sql request */ public class SqlQueryRequestBuilder extends ActionRequestBuilder { public SqlQueryRequestBuilder(ElasticsearchClient client, SqlQueryAction action) { - this(client, action, "", Collections.emptyList(), null, DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, DEFAULT_REQUEST_TIMEOUT, - DEFAULT_PAGE_TIMEOUT, "", Mode.PLAIN); + this(client, action, "", Collections.emptyList(), null, Protocol.TIME_ZONE, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, + Protocol.PAGE_TIMEOUT, "", Mode.PLAIN); } public SqlQueryRequestBuilder(ElasticsearchClient client, SqlQueryAction action, String query, List params, diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java index e0de05cd77438..118ba81f82df0 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java @@ -7,49 +7,28 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.Mode; import org.joda.time.ReadableDateTime; import java.io.IOException; +import java.sql.JDBCType; import java.util.ArrayList; import java.util.List; import java.util.Objects; import static java.util.Collections.unmodifiableList; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue; /** * Response to perform an sql query */ public class SqlQueryResponse extends ActionResponse implements ToXContentObject { - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("sql", true, - objects -> new SqlQueryResponse( - objects[0] == null ? "" : (String) objects[0], - (List) objects[1], - (List>) objects[2])); - - public static final ParseField CURSOR = new ParseField("cursor"); - public static final ParseField COLUMNS = new ParseField("columns"); - public static final ParseField ROWS = new ParseField("rows"); - - static { - PARSER.declareString(optionalConstructorArg(), CURSOR); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ColumnInfo.fromXContent(p), COLUMNS); - PARSER.declareField(constructorArg(), (p, c) -> parseRows(p), ROWS, ValueType.OBJECT_ARRAY); - } - // TODO: Simplify cursor handling private String cursor; private List columns; @@ -109,7 +88,7 @@ public void readFrom(StreamInput in) throws IOException { int columnCount = in.readVInt(); List columns = new ArrayList<>(columnCount); for (int c = 0; c < columnCount; c++) { - columns.add(new ColumnInfo(in)); + columns.add(readColumnInfo(in)); } this.columns = unmodifiableList(columns); } else { @@ -139,7 +118,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(true); out.writeVInt(columns.size()); for (ColumnInfo column : columns) { - column.writeTo(out); + writeColumnInfo(out, column); } } out.writeVInt(rows.size()); @@ -155,7 +134,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - AbstractSqlRequest.Mode mode = AbstractSqlRequest.Mode.fromString(params.param("mode")); + Mode mode = Mode.fromString(params.param("mode")); builder.startObject(); { if (columns != null) { @@ -187,8 +166,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws /** * Serializes the provided value in SQL-compatible way based on the client mode */ - public static XContentBuilder value(XContentBuilder builder, AbstractSqlRequest.Mode mode, Object value) throws IOException { - if (mode == AbstractSqlRequest.Mode.JDBC && value instanceof ReadableDateTime) { + public static XContentBuilder value(XContentBuilder builder, Mode mode, Object value) throws IOException { + if (mode == Mode.JDBC && value instanceof ReadableDateTime) { // JDBC cannot parse dates in string format builder.value(((ReadableDateTime) value).getMillis()); } else { @@ -197,34 +176,33 @@ public static XContentBuilder value(XContentBuilder 
builder, AbstractSqlRequest. return builder; } - public static SqlQueryResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); + public static ColumnInfo readColumnInfo(StreamInput in) throws IOException { + String table = in.readString(); + String name = in.readString(); + String esType = in.readString(); + JDBCType jdbcType; + int displaySize; + if (in.readBoolean()) { + jdbcType = JDBCType.valueOf(in.readVInt()); + displaySize = in.readVInt(); + } else { + jdbcType = null; + displaySize = 0; + } + return new ColumnInfo(table, name, esType, jdbcType, displaySize); } - public static List> parseRows(XContentParser parser) throws IOException { - List> list = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - if (parser.currentToken() == XContentParser.Token.START_ARRAY) { - list.add(parseRow(parser)); - } else { - throw new IllegalStateException("expected start array but got [" + parser.currentToken() + "]"); - } - } - return list; - } - - public static List parseRow(XContentParser parser) throws IOException { - List list = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - if (parser.currentToken().isValue()) { - list.add(parseFieldsValue(parser)); - } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - list.add(null); - } else { - throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]"); - } + public static void writeColumnInfo(StreamOutput out, ColumnInfo columnInfo) throws IOException { + out.writeString(columnInfo.table()); + out.writeString(columnInfo.name()); + out.writeString(columnInfo.esType()); + if (columnInfo.jdbcType() != null) { + out.writeBoolean(true); + out.writeVInt(columnInfo.jdbcType().getVendorTypeNumber()); + out.writeVInt(columnInfo.displaySize()); + } else { + out.writeBoolean(false); } - return list; } @Override diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java index 93e0630745100..103bfe5fddd69 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java @@ -10,8 +10,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.io.IOException; import java.util.List; @@ -56,4 +59,14 @@ public static SqlTranslateRequest fromXContent(XContentParser parser, Mode mode) request.mode(mode); return request; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + // This is needed just to test parsing of SqlTranslateRequest, so we can reuse SqlQuerySerialization + return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(mode(), query(), params(), timeZone(), fetchSize(), + requestTimeout(), pageTimeout(), filter(), null).toXContent(builder, params); + + } + + } diff --git 
a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java index 11adc975014ca..d6d97c19297de 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java @@ -9,27 +9,25 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.util.Collections; import java.util.List; import java.util.TimeZone; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_FETCH_SIZE; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_PAGE_TIMEOUT; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_REQUEST_TIMEOUT; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest.DEFAULT_TIME_ZONE; - /** * Builder for the request for the sql action for translating SQL queries into ES requests */ public class SqlTranslateRequestBuilder extends ActionRequestBuilder { public SqlTranslateRequestBuilder(ElasticsearchClient client, SqlTranslateAction action) { - this(client, action, AbstractSqlRequest.Mode.PLAIN, null, null, Collections.emptyList(), DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, - DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGE_TIMEOUT); + this(client, action, Mode.PLAIN, null, null, Collections.emptyList(), Protocol.TIME_ZONE, Protocol.FETCH_SIZE, + Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT); } - public SqlTranslateRequestBuilder(ElasticsearchClient client, SqlTranslateAction action, AbstractSqlRequest.Mode mode, String query, + public SqlTranslateRequestBuilder(ElasticsearchClient client, SqlTranslateAction action, Mode mode, String query, QueryBuilder filter, List params, TimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) { super(client, action, new SqlTranslateRequest(mode, query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout)); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java new file mode 100644 index 0000000000000..2001aecdac5d8 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.xcontent.ToXContentFragment; + +import java.util.Objects; + +/** + * Base request for all SQL-related requests for JDBC/CLI client + *
<p>
+ * Contains information about the client mode that can be used to generate different responses based on the caller type. + */ +public abstract class AbstractSqlRequest implements ToXContentFragment { + + private final Mode mode; + + protected AbstractSqlRequest(Mode mode) { + this.mode = mode; + } + + public Mode mode() { + return mode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AbstractSqlRequest that = (AbstractSqlRequest) o; + return mode == that.mode; + } + + @Override + public int hashCode() { + return Objects.hash(mode); + } + +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/ColumnInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java similarity index 70% rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/ColumnInfo.java rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java index 5c12c776dd198..ad2f687ae0bef 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/ColumnInfo.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java @@ -3,14 +3,11 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.plugin; +package org.elasticsearch.xpack.sql.proto; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -26,16 +23,16 @@ /** * Information about a column returned with first query response */ -public final class ColumnInfo implements Writeable, ToXContentObject { +public class ColumnInfo implements ToXContentObject { private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>("column_info", true, objects -> - new ColumnInfo( - objects[0] == null ? "" : (String) objects[0], - (String) objects[1], - (String) objects[2], - objects[3] == null ? null : JDBCType.valueOf((int) objects[3]), - objects[4] == null ? 0 : (int) objects[4])); + new ConstructingObjectParser<>("column_info", true, objects -> + new ColumnInfo( + objects[0] == null ? "" : (String) objects[0], + (String) objects[1], + (String) objects[2], + objects[3] == null ? null : JDBCType.valueOf((int) objects[3]), + objects[4] == null ? 
0 : (int) objects[4])); private static final ParseField TABLE = new ParseField("table"); private static final ParseField NAME = new ParseField("name"); @@ -74,33 +71,6 @@ public ColumnInfo(String table, String name, String esType) { this.displaySize = 0; } - ColumnInfo(StreamInput in) throws IOException { - table = in.readString(); - name = in.readString(); - esType = in.readString(); - if (in.readBoolean()) { - jdbcType = JDBCType.valueOf(in.readVInt()); - displaySize = in.readVInt(); - } else { - jdbcType = null; - displaySize = 0; - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(table); - out.writeString(name); - out.writeString(esType); - if (jdbcType != null) { - out.writeBoolean(true); - out.writeVInt(jdbcType.getVendorTypeNumber()); - out.writeVInt(displaySize); - } else { - out.writeBoolean(false); - } - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -162,10 +132,10 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ColumnInfo that = (ColumnInfo) o; return displaySize == that.displaySize && - Objects.equals(table, that.table) && - Objects.equals(name, that.name) && - Objects.equals(esType, that.esType) && - jdbcType == that.jdbcType; + Objects.equals(table, that.table) && + Objects.equals(name, that.name) && + Objects.equals(esType, that.esType) && + jdbcType == that.jdbcType; } @Override diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java new file mode 100644 index 0000000000000..73b6cbc529ec6 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.util.Objects; + +/** + * Main (/) response for JDBC/CLI client + */ +public class MainResponse { + private String nodeName; + // TODO: Add parser for Version + private Version version; + private String clusterName; + private String clusterUuid; + // TODO: Add parser for Build + private Build build; + + private MainResponse() { + } + + public MainResponse(String nodeName, Version version, String clusterName, String clusterUuid, Build build) { + this.nodeName = nodeName; + this.version = version; + this.clusterName = clusterName; + this.clusterUuid = clusterUuid; + this.build = build; + } + + public String getNodeName() { + return nodeName; + } + + public Version getVersion() { + return version; + } + + public String getClusterName() { + return clusterName; + } + + public String getClusterUuid() { + return clusterUuid; + } + + public Build getBuild() { + return build; + } + + private static final ObjectParser PARSER = new ObjectParser<>(MainResponse.class.getName(), true, + MainResponse::new); + + static { + PARSER.declareString((response, value) -> response.nodeName = value, new ParseField("name")); + PARSER.declareString((response, value) -> response.clusterName = value, new ParseField("cluster_name")); + PARSER.declareString((response, value) -> response.clusterUuid = value, new ParseField("cluster_uuid")); + PARSER.declareString((response, value) -> { + }, new ParseField("tagline")); + PARSER.declareObject((response, value) -> { + final String buildFlavor = (String) value.get("build_flavor"); + final String buildType = (String) value.get("build_type"); + response.build = + new Build( + buildFlavor == null ? Build.Flavor.UNKNOWN : Build.Flavor.fromDisplayName(buildFlavor), + buildType == null ? Build.Type.UNKNOWN : Build.Type.fromDisplayName(buildType), + (String) value.get("build_hash"), + (String) value.get("build_date"), + (boolean) value.get("build_snapshot")); + response.version = Version.fromString((String) value.get("number")); + }, (parser, context) -> parser.map(), new ParseField("version")); + } + + public static MainResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MainResponse other = (MainResponse) o; + return Objects.equals(nodeName, other.nodeName) && + Objects.equals(version, other.version) && + Objects.equals(clusterUuid, other.clusterUuid) && + Objects.equals(build, other.build) && + Objects.equals(clusterName, other.clusterName); + } + + @Override + public int hashCode() { + return Objects.hash(nodeName, version, clusterUuid, build, clusterName); + } +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java new file mode 100644 index 0000000000000..02f175ca80d79 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
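A hypothetical sketch of what the PARSER above consumes (field names are taken from the declarations; the parser variable is assumed to already be positioned on a standard main-action response body):

    // Sketch only: parse the root ("/") response of a node into a MainResponse.
    // Expected body shape, per the fields declared above:
    //   { "name": "node-1", "cluster_name": "es", "cluster_uuid": "...",
    //     "tagline": "...",
    //     "version": { "number": "6.3.0", "build_flavor": "default", "build_type": "tar",
    //                  "build_hash": "...", "build_date": "...", "build_snapshot": false } }
    MainResponse info = MainResponse.fromXContent(parser);
    String cluster = info.getClusterName();
    Version version = info.getVersion();
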
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.proto; + +import java.util.Locale; + +/** + * SQL protocol mode + */ +public enum Mode { + PLAIN, + JDBC; + + public static Mode fromString(String mode) { + if (mode == null) { + return PLAIN; + } + return Mode.valueOf(mode.toUpperCase(Locale.ROOT)); + } + + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java new file mode 100644 index 0000000000000..a61978828c80c --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.unit.TimeValue; + +import java.util.TimeZone; + +/** + * Sql protocol defaults and end-points shared between JDBC and REST protocol implementations + */ +public final class Protocol { + public static final TimeZone TIME_ZONE = TimeZone.getTimeZone("UTC"); + + /** + * Global choice for the default fetch size. + */ + public static final int FETCH_SIZE = 1000; + public static final TimeValue REQUEST_TIMEOUT = TimeValue.timeValueSeconds(90); + public static final TimeValue PAGE_TIMEOUT = TimeValue.timeValueSeconds(45); + + /** + * SQL-related endpoints + */ + public static final String CLEAR_CURSOR_REST_ENDPOINT = "/_xpack/sql/close"; + public static final String SQL_QUERY_REST_ENDPOINT = "/_xpack/sql"; +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java new file mode 100644 index 0000000000000..310dde4430210 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
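A minimal sketch of how a caller might combine Mode and Protocol above (modeParam is an assumed input; per the code above, Mode.fromString maps null to PLAIN and upper-cases everything else):

    // Sketch only: resolve the client mode and fall back to the shared protocol defaults.
    Mode mode = Mode.fromString(modeParam);               // null -> PLAIN, "jdbc" -> JDBC
    TimeValue requestTimeout = Protocol.REQUEST_TIMEOUT;  // 90s shared default
    TimeValue pageTimeout = Protocol.PAGE_TIMEOUT;        // 45s shared default
    String endpoint = Protocol.SQL_QUERY_REST_ENDPOINT;   // "/_xpack/sql"
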
+ */ +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request to clean all SQL resources associated with the cursor for JDBC/CLI client + */ +public class SqlClearCursorRequest extends AbstractSqlRequest { + + private final String cursor; + + public SqlClearCursorRequest(Mode mode, String cursor) { + super(mode); + this.cursor = cursor; + } + + public String getCursor() { + return cursor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + SqlClearCursorRequest that = (SqlClearCursorRequest) o; + return Objects.equals(cursor, that.cursor); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), cursor); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("cursor", cursor); + return builder; + } +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java new file mode 100644 index 0000000000000..b56a8335d20d5 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Response to the request to clean all SQL resources associated with the cursor for JDBC/CLI client + */ +public class SqlClearCursorResponse { + + public static final ParseField SUCCEEDED = new ParseField("succeeded"); + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(SqlClearCursorResponse.class.getName(), true, + objects -> new SqlClearCursorResponse(objects[0] == null ? false : (boolean) objects[0])); + + static { + PARSER.declareBoolean(optionalConstructorArg(), SUCCEEDED); + } + + + private final boolean succeeded; + + public SqlClearCursorResponse(boolean succeeded) { + this.succeeded = succeeded; + } + + /** + * @return Whether the attempt to clear a cursor was successful. 
+ */ + public boolean isSucceeded() { + return succeeded; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlClearCursorResponse response = (SqlClearCursorResponse) o; + return succeeded == response.succeeded; + } + + @Override + public int hashCode() { + return Objects.hash(succeeded); + } + + public static SqlClearCursorResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java new file mode 100644 index 0000000000000..00a1696a05f60 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.TimeZone; + +/** + * Sql query request for JDBC/CLI client + */ +public class SqlQueryRequest extends AbstractSqlRequest { + @Nullable + private final String cursor; + private final String query; + private final TimeZone timeZone; + private final int fetchSize; + private final TimeValue requestTimeout; + private final TimeValue pageTimeout; + @Nullable + private final ToXContent filter; + private final List params; + + + public SqlQueryRequest(Mode mode, String query, List params, TimeZone timeZone, + int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, ToXContent filter, String cursor) { + super(mode); + this.query = query; + this.params = params; + this.timeZone = timeZone; + this.fetchSize = fetchSize; + this.requestTimeout = requestTimeout; + this.pageTimeout = pageTimeout; + this.filter = filter; + this.cursor = cursor; + } + + public SqlQueryRequest(Mode mode, String query, List params, ToXContent filter, TimeZone timeZone, + int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) { + this(mode, query, params, timeZone, fetchSize, requestTimeout, pageTimeout, filter, null); + } + + public SqlQueryRequest(Mode mode, String cursor, TimeValue requestTimeout, TimeValue pageTimeout) { + this(mode, "", Collections.emptyList(), Protocol.TIME_ZONE, Protocol.FETCH_SIZE, requestTimeout, pageTimeout, null, cursor); + } + + + /** + * The key that must be sent back to SQL to access the next page of + * results. + */ + public String cursor() { + return cursor; + } + + /** + * Text of SQL query + */ + public String query() { + return query; + } + + /** + * An optional list of parameters if the SQL query is parametrized + */ + public List params() { + return params; + } + + /** + * The client's time zone + */ + public TimeZone timeZone() { + return timeZone; + } + + + /** + * Hint about how many results to fetch at once. 
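A hypothetical round trip over the two clear-cursor classes above (the HTTP transport is elided; cursorToken and parser are assumed to come from an earlier query and its response):

    // Sketch only: ask the server to release a cursor, then check the outcome.
    SqlClearCursorRequest request = new SqlClearCursorRequest(Mode.PLAIN, cursorToken);
    // ... POST request.toXContent(...) to Protocol.CLEAR_CURSOR_REST_ENDPOINT ...
    SqlClearCursorResponse response = SqlClearCursorResponse.fromXContent(parser);
    assert response.isSucceeded();
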
+ */ + public int fetchSize() { + return fetchSize; + } + + /** + * The timeout specified on the search request + */ + public TimeValue requestTimeout() { + return requestTimeout; + } + + /** + * The scroll timeout + */ + public TimeValue pageTimeout() { + return pageTimeout; + } + + /** + * An optional Query DSL defined query that can added as a filter on the top of the SQL query + */ + public ToXContent filter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + SqlQueryRequest that = (SqlQueryRequest) o; + return fetchSize == that.fetchSize && + Objects.equals(query, that.query) && + Objects.equals(params, that.params) && + Objects.equals(timeZone, that.timeZone) && + Objects.equals(requestTimeout, that.requestTimeout) && + Objects.equals(pageTimeout, that.pageTimeout) && + Objects.equals(filter, that.filter) && + Objects.equals(cursor, that.cursor); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter, cursor); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (query != null) { + builder.field("query", query); + } + if (this.params.isEmpty() == false) { + builder.startArray("params"); + for (SqlTypedParamValue val : this.params) { + val.toXContent(builder, params); + } + builder.endArray(); + } + if (timeZone != null) { + builder.field("time_zone", timeZone.getID()); + } + if (fetchSize != Protocol.FETCH_SIZE) { + builder.field("fetch_size", fetchSize); + } + if (requestTimeout != Protocol.REQUEST_TIMEOUT) { + builder.field("request_timeout", requestTimeout.getStringRep()); + } + if (pageTimeout != Protocol.PAGE_TIMEOUT) { + builder.field("page_timeout", pageTimeout.getStringRep()); + } + if (filter != null) { + builder.field("filter"); + filter.toXContent(builder, params); + } + if (cursor != null) { + builder.field("cursor", cursor); + } + return builder; + } + +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java new file mode 100644 index 0000000000000..8937261237c7f --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.proto; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue; + +/** + * Response to perform an sql query for JDBC/CLI client + */ +public class SqlQueryResponse { + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("sql", true, + objects -> new SqlQueryResponse( + objects[0] == null ? "" : (String) objects[0], + (List) objects[1], + (List>) objects[2])); + + public static final ParseField CURSOR = new ParseField("cursor"); + public static final ParseField COLUMNS = new ParseField("columns"); + public static final ParseField ROWS = new ParseField("rows"); + + static { + PARSER.declareString(optionalConstructorArg(), CURSOR); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ColumnInfo.fromXContent(p), COLUMNS); + PARSER.declareField(constructorArg(), (p, c) -> parseRows(p), ROWS, ValueType.OBJECT_ARRAY); + } + + // TODO: Simplify cursor handling + private final String cursor; + private final List columns; + // TODO investigate reusing Page here - it probably is much more efficient + private final List> rows; + + public SqlQueryResponse(String cursor, @Nullable List columns, List> rows) { + this.cursor = cursor; + this.columns = columns; + this.rows = rows; + } + + /** + * The key that must be sent back to SQL to access the next page of + * results. If equal to "" then there is no next page. 
+ */ + public String cursor() { + return cursor; + } + + public long size() { + return rows.size(); + } + + public List columns() { + return columns; + } + + public List> rows() { + return rows; + } + + public static SqlQueryResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public static List> parseRows(XContentParser parser) throws IOException { + List> list = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + list.add(parseRow(parser)); + } else { + throw new IllegalStateException("expected start array but got [" + parser.currentToken() + "]"); + } + } + return list; + } + + public static List parseRow(XContentParser parser) throws IOException { + List list = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken().isValue()) { + list.add(parseFieldsValue(parser)); + } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + list.add(null); + } else { + throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]"); + } + } + return list; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlQueryResponse that = (SqlQueryResponse) o; + return Objects.equals(cursor, that.cursor) && + Objects.equals(columns, that.columns) && + Objects.equals(rows, that.rows); + } + + @Override + public int hashCode() { + return Objects.hash(cursor, columns, rows); + } + +} diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTypedParamValue.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java similarity index 76% rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTypedParamValue.java rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java index ffde82fab3491..a85b66b80a34d 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTypedParamValue.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java @@ -3,12 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
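A hypothetical paging loop built on the cursor contract above (httpClient refers to the HttpClient adjusted later in this patch; process(...) is a placeholder and SQLException handling is elided):

    // Sketch only: fetch pages until the server returns an empty cursor.
    SqlQueryResponse page = httpClient.queryInit("SELECT a, b FROM test", 100);
    process(page.rows());
    while (page.cursor().isEmpty() == false) {
        page = httpClient.nextPage(page.cursor());
        process(page.rows());
    }
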
*/ -package org.elasticsearch.xpack.sql.plugin; +package org.elasticsearch.xpack.sql.proto; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -25,12 +22,12 @@ /** * Represent a strongly typed parameter value */ -public class SqlTypedParamValue implements ToXContentObject, Writeable { +public class SqlTypedParamValue implements ToXContentObject { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("params", true, objects -> new SqlTypedParamValue( - objects[0], - DataType.fromEsType((String) objects[1]))); + DataType.fromEsType((String) objects[1]), objects[0] + )); private static final ParseField VALUE = new ParseField("value"); private static final ParseField TYPE = new ParseField("type"); @@ -43,7 +40,7 @@ public class SqlTypedParamValue implements ToXContentObject, Writeable { public final Object value; public final DataType dataType; - public SqlTypedParamValue(Object value, DataType dataType) { + public SqlTypedParamValue(DataType dataType, Object value) { this.value = value; this.dataType = dataType; } @@ -61,17 +58,6 @@ public static SqlTypedParamValue fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeEnum(dataType); - out.writeGenericValue(value); - } - - public SqlTypedParamValue(StreamInput in) throws IOException { - dataType = in.readEnum(DataType.class); - value = in.readGenericValue(); - } - @Override public boolean equals(Object o) { if (this == o) { @@ -94,4 +80,4 @@ public int hashCode() { public String toString() { return String.valueOf(value) + "[" + dataType + "]"; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java index 83546924a38f8..e479ae8b4f1ea 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java @@ -8,17 +8,18 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.sql.proto.Mode; import org.junit.Before; import java.io.IOException; import java.util.function.Consumer; public class SqlClearCursorRequestTests extends AbstractSerializingTestCase { - public AbstractSqlRequest.Mode testMode; + public Mode testMode; @Before public void setup() { - testMode = randomFrom(AbstractSqlRequest.Mode.values()); + testMode = randomFrom(Mode.values()); } @Override @@ -40,7 +41,7 @@ protected SqlClearCursorRequest doParseInstance(XContentParser parser) { protected SqlClearCursorRequest mutateInstance(SqlClearCursorRequest instance) throws IOException { @SuppressWarnings("unchecked") Consumer mutator = randomFrom( - request -> request.mode(randomValueOtherThan(request.mode(), () -> randomFrom(AbstractSqlRequest.Mode.values()))), + request -> 
request.mode(randomValueOtherThan(request.mode(), () -> randomFrom(Mode.values()))), request -> request.setCursor(randomValueOtherThan(request.getCursor(), SqlQueryResponseTests::randomStringCursor)) ); SqlClearCursorRequest newRequest = new SqlClearCursorRequest(instance.mode(), instance.getCursor()); diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java index 0ef2875d8e7dd..94964428bb4f3 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java @@ -27,6 +27,8 @@ protected SqlClearCursorResponse mutateInstance(SqlClearCursorResponse instance) @Override protected SqlClearCursorResponse doParseInstance(XContentParser parser) { - return SqlClearCursorResponse.fromXContent(parser); + org.elasticsearch.xpack.sql.proto.SqlClearCursorResponse response = + org.elasticsearch.xpack.sql.proto.SqlClearCursorResponse.fromXContent(parser); + return new SqlClearCursorResponse(response.isSucceeded()); } } diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java index 5fbe4e42d48f2..0e4a183ab1626 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; import org.junit.Before; @@ -28,11 +30,11 @@ public class SqlQueryRequestTests extends AbstractSerializingTestCase { - public AbstractSqlRequest.Mode testMode; + public Mode testMode; @Before public void setup() { - testMode = randomFrom(AbstractSqlRequest.Mode.values()); + testMode = randomFrom(Mode.values()); } @Override @@ -63,11 +65,11 @@ public List randomParameters() { List arr = new ArrayList<>(len); for (int i = 0; i < len; i++) { @SuppressWarnings("unchecked") Supplier supplier = randomFrom( - () -> new SqlTypedParamValue(randomBoolean(), DataType.BOOLEAN), - () -> new SqlTypedParamValue(randomLong(), DataType.LONG), - () -> new SqlTypedParamValue(randomDouble(), DataType.DOUBLE), - () -> new SqlTypedParamValue(null, DataType.NULL), - () -> new SqlTypedParamValue(randomAlphaOfLength(10), DataType.KEYWORD) + () -> new SqlTypedParamValue(DataType.BOOLEAN, randomBoolean()), + () -> new SqlTypedParamValue(DataType.LONG, randomLong()), + () -> new SqlTypedParamValue(DataType.DOUBLE, randomDouble()), + () -> new SqlTypedParamValue(DataType.NULL, null), + () -> new SqlTypedParamValue(DataType.KEYWORD, randomAlphaOfLength(10)) ); arr.add(supplier.get()); } @@ -93,7 +95,7 @@ protected SqlQueryRequest doParseInstance(XContentParser parser) { protected SqlQueryRequest mutateInstance(SqlQueryRequest instance) { @SuppressWarnings("unchecked") Consumer mutator = randomFrom( - request -> request.mode(randomValueOtherThan(request.mode(), () -> 
randomFrom(AbstractSqlRequest.Mode.values()))), + request -> request.mode(randomValueOtherThan(request.mode(), () -> randomFrom(Mode.values()))), request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), request -> request.params(randomValueOtherThan(request.params(), this::randomParameters)), request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java index 42c08bb09142f..bc5e5ae2a0180 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import java.io.IOException; import java.sql.JDBCType; @@ -114,6 +115,8 @@ public void testToXContent() throws IOException { @Override protected SqlQueryResponse doParseInstance(XContentParser parser) { - return SqlQueryResponse.fromXContent(parser); + org.elasticsearch.xpack.sql.proto.SqlQueryResponse response = + org.elasticsearch.xpack.sql.proto.SqlQueryResponse.fromXContent(parser); + return new SqlQueryResponse(response.cursor(), response.columns(), response.rows()); } } diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java index 21b002293768f..2eb3d71bbf410 100644 --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java +++ b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.proto.Mode; import org.junit.Before; import java.io.IOException; @@ -25,11 +26,11 @@ public class SqlTranslateRequestTests extends AbstractSerializingTestCase { - public AbstractSqlRequest.Mode testMode; + public Mode testMode; @Before public void setup() { - testMode = randomFrom(AbstractSqlRequest.Mode.values()); + testMode = randomFrom(Mode.values()); } @Override @@ -71,7 +72,7 @@ protected SqlTranslateRequest mutateInstance(SqlTranslateRequest instance) throw request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), request -> request.fetchSize(randomValueOtherThan(request.fetchSize(), () -> between(1, Integer.MAX_VALUE))), - request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), () -> randomTV())), + request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), this::randomTV)), request -> request.filter(randomValueOtherThan(request.filter(), () -> request.filter() == null ? 
randomFilter(random()) : randomFilterOrNull(random()))) ); diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index bf7c245b24cbe..8f77d5397e948 100644 --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.sql.client; -import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -22,13 +21,14 @@ import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration; import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection; import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection.ResponseOrException; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest; -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction; -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorRequest; -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorResponse; -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction; -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequest; -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.AbstractSqlRequest; +import org.elasticsearch.xpack.sql.proto.MainResponse; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.proto.SqlClearCursorRequest; +import org.elasticsearch.xpack.sql.proto.SqlClearCursorResponse; +import org.elasticsearch.xpack.sql.proto.SqlQueryRequest; +import org.elasticsearch.xpack.sql.proto.SqlQueryResponse; import java.io.IOException; import java.io.InputStream; @@ -50,7 +50,7 @@ public class HttpClient { private final ConnectionConfiguration cfg; - public HttpClient(ConnectionConfiguration cfg) throws SQLException { + public HttpClient(ConnectionConfiguration cfg) { this.cfg = cfg; } @@ -66,26 +66,25 @@ public MainResponse serverInfo() throws SQLException { public SqlQueryResponse queryInit(String query, int fetchSize) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about - SqlQueryRequest sqlRequest = new SqlQueryRequest(AbstractSqlRequest.Mode.PLAIN, query, Collections.emptyList(), null, + SqlQueryRequest sqlRequest = new SqlQueryRequest(Mode.PLAIN, query, Collections.emptyList(), null, TimeZone.getTimeZone("UTC"), fetchSize, TimeValue.timeValueMillis(cfg.queryTimeout()), - TimeValue.timeValueMillis(cfg.pageTimeout()), "" - ); + TimeValue.timeValueMillis(cfg.pageTimeout())); return query(sqlRequest); } public SqlQueryResponse query(SqlQueryRequest sqlRequest) throws SQLException { - return post(SqlQueryAction.REST_ENDPOINT, sqlRequest, SqlQueryResponse::fromXContent); + return post(Protocol.SQL_QUERY_REST_ENDPOINT, sqlRequest, SqlQueryResponse::fromXContent); } public SqlQueryResponse nextPage(String cursor) throws SQLException { - SqlQueryRequest sqlRequest = new SqlQueryRequest(); - sqlRequest.cursor(cursor); - return post(SqlQueryAction.REST_ENDPOINT, sqlRequest, SqlQueryResponse::fromXContent); + SqlQueryRequest sqlRequest = new SqlQueryRequest(Mode.PLAIN, cursor, 
TimeValue.timeValueMillis(cfg.queryTimeout()), + TimeValue.timeValueMillis(cfg.pageTimeout())); + return post(Protocol.SQL_QUERY_REST_ENDPOINT, sqlRequest, SqlQueryResponse::fromXContent); } public boolean queryClose(String cursor) throws SQLException { - SqlClearCursorResponse response = post(SqlClearCursorAction.REST_ENDPOINT, - new SqlClearCursorRequest(AbstractSqlRequest.Mode.PLAIN, cursor), + SqlClearCursorResponse response = post(Protocol.CLEAR_CURSOR_REST_ENDPOINT, + new SqlClearCursorRequest(Mode.PLAIN, cursor), SqlClearCursorResponse::fromXContent); return response.isSucceeded(); } @@ -167,4 +166,4 @@ private Response fromXContent(XContentType xContentType, BytesReferen throw new ClientException("Cannot parse response", ex); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index 8c58769b75962..23f1a6049dc2c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.sql.planner.Planner; import org.elasticsearch.xpack.sql.planner.PlanningException; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.RowSet; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AstBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AstBuilder.java index de28f33187260..48aa2cf1fa79b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AstBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AstBuilder.java @@ -8,7 +8,7 @@ import org.antlr.v4.runtime.Token; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SingleStatementContext; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.util.Map; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java index bf432a7236357..7ce65aa4cfec1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java @@ -33,7 +33,7 @@ import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTableTypes; import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTables; import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTypes; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -190,4 +190,4 @@ public SysTypes visitSysTypes(SysTypesContext ctx) { public Object visitSysTableTypes(SysTableTypesContext ctx) { return new SysTableTypes(source(ctx)); } -} \ No newline at end of file +} diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index b14611f9f599f..a6185def278a1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -76,7 +76,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -516,4 +516,4 @@ private SqlTypedParamValue param(TerminalNode node) { return params.get(token); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index f41fce1602783..3435994a0fc42 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -41,7 +41,7 @@ import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation; import org.elasticsearch.xpack.sql.plan.logical.With; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.session.EmptyExecutable; import org.elasticsearch.xpack.sql.type.DataType; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java index 7aa3748e31eae..b7fe9178f911f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.util.Arrays; import java.util.BitSet; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index 4d47ca8c373e1..534d0459180e0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -12,23 +12,25 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static 
org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction.REST_ENDPOINT; + public class RestSqlClearCursorAction extends BaseRestHandler { public RestSqlClearCursorAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(POST, REST_ENDPOINT, this); + controller.registerHandler(POST, Protocol.CLEAR_CURSOR_REST_ENDPOINT, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { SqlClearCursorRequest sqlRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { - sqlRequest = SqlClearCursorRequest.fromXContent(parser, AbstractSqlRequest.Mode.fromString(request.param("mode"))); + sqlRequest = SqlClearCursorRequest.fromXContent(parser, Mode.fromString(request.param("mode"))); } return channel -> client.executeLocally(SqlClearCursorAction.INSTANCE, sqlRequest, new RestToXContentListener<>(channel)); } @@ -37,4 +39,4 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli public String getName() { return "sql_translate_action"; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 9d043f855fd44..9e34a3fb2e097 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -18,6 +18,8 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestResponseListener; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; @@ -31,15 +33,15 @@ public class RestSqlQueryAction extends BaseRestHandler { public RestSqlQueryAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(GET, SqlQueryAction.REST_ENDPOINT, this); - controller.registerHandler(POST, SqlQueryAction.REST_ENDPOINT, this); + controller.registerHandler(GET, Protocol.SQL_QUERY_REST_ENDPOINT, this); + controller.registerHandler(POST, Protocol.SQL_QUERY_REST_ENDPOINT, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { SqlQueryRequest sqlRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { - sqlRequest = SqlQueryRequest.fromXContent(parser, AbstractSqlRequest.Mode.fromString(request.param("mode"))); + sqlRequest = SqlQueryRequest.fromXContent(parser,Mode.fromString(request.param("mode"))); } /* diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 6167e4e571dff..503ee84314820 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.sql.proto.Mode; import java.io.IOException; @@ -32,7 +33,7 @@ public 
RestSqlTranslateAction(Settings settings, RestController controller) { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { SqlTranslateRequest sqlRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { - sqlRequest = SqlTranslateRequest.fromXContent(parser, AbstractSqlRequest.Mode.fromString(request.param("mode"))); + sqlRequest = SqlTranslateRequest.fromXContent(parser, Mode.fromString(request.param("mode"))); } return channel -> client.executeLocally(SqlTranslateAction.INSTANCE, sqlRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlLicenseChecker.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlLicenseChecker.java index 8a3ef973d6bf1..b15ff6a1ae4aa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlLicenseChecker.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlLicenseChecker.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.sql.plugin; +import org.elasticsearch.xpack.sql.proto.Mode; + import java.util.function.Consumer; /** @@ -12,16 +14,16 @@ */ public class SqlLicenseChecker { - private final Consumer checkIfSqlAllowed; + private final Consumer checkIfSqlAllowed; - public SqlLicenseChecker(Consumer checkIfSqlAllowed) { + public SqlLicenseChecker(Consumer checkIfSqlAllowed) { this.checkIfSqlAllowed = checkIfSqlAllowed; } /** * Throws an ElasticsearchSecurityException if the specified mode is not allowed */ - public void checkIfSqlAllowed(AbstractSqlRequest.Mode mode) { + public void checkIfSqlAllowed(Mode mode) { checkIfSqlAllowed.accept(mode); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java index 349a481cf660f..9d0cd60c23e32 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -38,17 +39,17 @@ String format(Cursor cursor, RestRequest request, SqlQueryResponse response) { final CliFormatter formatter; if (cursor instanceof CliFormatterCursor) { formatter = ((CliFormatterCursor) cursor).getCliFormatter(); - return formatter.formatWithoutHeader(response); + return formatter.formatWithoutHeader(response.rows()); } else { - formatter = new CliFormatter(response); - return formatter.formatWithHeader(response); + formatter = new CliFormatter(response.columns(), response.rows()); + return formatter.formatWithHeader(response.columns(), response.rows()); } } @Override Cursor wrapCursor(Cursor oldCursor, SqlQueryResponse response) { CliFormatter formatter = (oldCursor instanceof CliFormatterCursor) ? 
- ((CliFormatterCursor) oldCursor).getCliFormatter() : new CliFormatter(response); + ((CliFormatterCursor) oldCursor).getCliFormatter() : new CliFormatter(response.columns(), response.rows()); return CliFormatterCursor.wrap(super.wrapCursor(oldCursor, response), formatter); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 5b59ced7a494d..46429e2d50829 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.session.RowSet; @@ -26,7 +27,7 @@ import java.util.List; import static java.util.Collections.unmodifiableList; -import static org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest.Mode.JDBC; +import static org.elasticsearch.xpack.sql.proto.Mode.JDBC; public class TransportSqlQueryAction extends HandledTransportAction { private final PlanExecutor planExecutor; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index 681a5eb1fbd24..ae43d4a988922 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java @@ -9,16 +9,14 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest; +import org.elasticsearch.xpack.sql.proto.Protocol; import java.util.TimeZone; -// Typed object holding properties for a given action +// Typed object holding properties for a given action public class Configuration { public static final Configuration DEFAULT = new Configuration(TimeZone.getTimeZone("UTC"), - AbstractSqlQueryRequest.DEFAULT_FETCH_SIZE, - AbstractSqlQueryRequest.DEFAULT_REQUEST_TIMEOUT, - AbstractSqlQueryRequest.DEFAULT_PAGE_TIMEOUT, - null); + Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null); private TimeZone timeZone; private int pageSize; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java index 880e98c606408..65da32c3122ab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.sql.planner.Planner; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.rule.RuleExecutor; import java.util.List; @@ -162,4 +162,4 @@ public void sqlExecutable(String sql, List params, ActionLis public 
Configuration settings() { return settings; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java index b36fa811d3b25..22a7889f6247e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java @@ -7,10 +7,10 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest.Mode; -import org.elasticsearch.xpack.sql.plugin.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.plugin.SqlQueryAction; import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; +import org.elasticsearch.xpack.sql.proto.Mode; import java.sql.JDBCType; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java index 0cd8c33b11688..bac221df2e92d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.SqlException; import org.elasticsearch.xpack.sql.plugin.CliFormatter; import org.elasticsearch.xpack.sql.plugin.CliFormatterCursor; -import org.elasticsearch.xpack.sql.plugin.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursor; @@ -80,7 +80,8 @@ static Cursor randomNonEmptyCursor() { () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { - return CliFormatterCursor.wrap(ScrollCursorTests.randomScrollCursor(), new CliFormatter(response)); + return CliFormatterCursor.wrap(ScrollCursorTests.randomScrollCursor(), + new CliFormatter(response.columns(), response.rows())); } else { return ScrollCursorTests.randomScrollCursor(); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java index 5e35965985987..37ab5fb2b6ce3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.Equals; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.parser.SqlParser; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; @@ -28,7 +28,7 @@ public class ParameterTests extends ESTestCase { public void testSingleParameter() { Expression expression = new SqlParser().createExpression("a = \n?", Collections.singletonList( - new SqlTypedParamValue("foo", DataType.KEYWORD) + new SqlTypedParamValue(DataType.KEYWORD, "foo") )); logger.info(expression); assertThat(expression, 
instanceOf(Equals.class)); @@ -42,10 +42,10 @@ public void testSingleParameter() { public void testMultipleParameters() { Expression expression = new SqlParser().createExpression("(? + ? * ?) - ?", Arrays.asList( - new SqlTypedParamValue(1L, DataType.LONG), - new SqlTypedParamValue(2L, DataType.LONG), - new SqlTypedParamValue(3L, DataType.LONG), - new SqlTypedParamValue(4L, DataType.LONG) + new SqlTypedParamValue(DataType.LONG, 1L), + new SqlTypedParamValue(DataType.LONG, 2L), + new SqlTypedParamValue(DataType.LONG, 3L), + new SqlTypedParamValue(DataType.LONG, 4L) )); assertThat(expression, instanceOf(Sub.class)); Sub sub = (Sub) expression; @@ -62,9 +62,9 @@ public void testMultipleParameters() { public void testNotEnoughParameters() { ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression("(? + ? * ?) - ?", Arrays.asList( - new SqlTypedParamValue(1L, DataType.LONG), - new SqlTypedParamValue(2L, DataType.LONG), - new SqlTypedParamValue(3L, DataType.LONG) + new SqlTypedParamValue(DataType.LONG, 1L), + new SqlTypedParamValue(DataType.LONG, 2L), + new SqlTypedParamValue(DataType.LONG, 3L) ))); assertThat(ex.getMessage(), containsString("Not enough actual parameters")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java index c94bcf0e664c4..b2abf0b680054 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.regex.Like; import org.elasticsearch.xpack.sql.expression.regex.LikePattern; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; @@ -33,7 +33,7 @@ private LikePattern like(String pattern) { Expression exp = null; boolean parameterized = randomBoolean(); if (parameterized) { - exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(pattern, DataType.KEYWORD))); + exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD, pattern))); } else { exp = parser.createExpression(String.format(Locale.ROOT, "exp LIKE '%s'", pattern)); } @@ -63,9 +63,9 @@ public void testInvalidChar() { assertThat(error("'%string' ESCAPE '%'"), is("line 1:28: Char [%] cannot be used for escaping")); } - + public void testCannotUseStar() { assertThat(error("'|*string' ESCAPE '|'"), is("line 1:11: Invalid char [*] found in pattern [|*string] at position 1; use [%] or [_] instead")); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java index c08c423be34eb..e42ec51b425d2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; 
import org.elasticsearch.xpack.sql.plan.logical.command.Command; -import org.elasticsearch.xpack.sql.plugin.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -228,7 +228,7 @@ public void testSysTablesTypesEnumerationWoString() throws Exception { } private SqlTypedParamValue param(Object value) { - return new SqlTypedParamValue(value, DataTypes.fromJava(value)); + return new SqlTypedParamValue(DataTypes.fromJava(value), value); } private Tuple sql(String sql, List params) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java index 1fe3c9fc89e99..d87dba3306889 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import java.sql.JDBCType; import java.util.Arrays; @@ -23,17 +24,17 @@ public class CliFormatterTests extends ESTestCase { Arrays.asList( Arrays.asList("15charwidedata!", 1, 6.888, 12, "rabbit"), Arrays.asList("dog", 1.7976931348623157E308, 123124.888, 9912, "goat"))); - private final CliFormatter formatter = new CliFormatter(firstResponse); + private final CliFormatter formatter = new CliFormatter(firstResponse.columns(), firstResponse.rows()); /** - * Tests for {@link CliFormatter#formatWithHeader(SqlQueryResponse)}, values + * Tests for {@link CliFormatter#formatWithHeader}, values * of exactly the minimum column size, column names of exactly * the minimum column size, column headers longer than the * minimum column size, and values longer than the minimum * column size. */ public void testFormatWithHeader() { - String[] result = formatter.formatWithHeader(firstResponse).split("\n"); + String[] result = formatter.formatWithHeader(firstResponse.columns(), firstResponse.rows()).split("\n"); assertThat(result, arrayWithSize(4)); assertEquals(" foo | bar |15charwidename!|superduperwidename!!!| baz ", result[0]); assertEquals("---------------+----------------------+---------------+---------------------+---------------", result[1]); @@ -42,14 +43,14 @@ public void testFormatWithHeader() { } /** - * Tests for {@link CliFormatter#formatWithoutHeader(SqlQueryResponse)} and + * Tests for {@link CliFormatter#formatWithoutHeader} and * truncation of long columns. */ public void testFormatWithoutHeader() { - String[] result = formatter.formatWithoutHeader(new SqlQueryResponse("", null, + String[] result = formatter.formatWithoutHeader( Arrays.asList( Arrays.asList("ohnotruncateddata", 4, 1, 77, "wombat"), - Arrays.asList("dog", 2, 123124.888, 9912, "goat")))).split("\n"); + Arrays.asList("dog", 2, 123124.888, 9912, "goat"))).split("\n"); assertThat(result, arrayWithSize(2)); assertEquals("ohnotruncatedd~|4 |1 |77 |wombat ", result[0]); assertEquals("dog |2 |123124.888 |9912 |goat ", result[1]); @@ -59,9 +60,9 @@ public void testFormatWithoutHeader() { * Ensure that our estimates are perfect in at least some cases. 
*/ public void testEstimateSize() { - assertEquals(formatter.formatWithHeader(firstResponse).length(), + assertEquals(formatter.formatWithHeader(firstResponse.columns(), firstResponse.rows()).length(), formatter.estimateSize(firstResponse.rows().size() + 2)); - assertEquals(formatter.formatWithoutHeader(firstResponse).length(), + assertEquals(formatter.formatWithoutHeader(firstResponse.rows()).length(), formatter.estimateSize(firstResponse.rows().size())); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java index 1c6bbfa69e816..bf6ccbb225a54 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xpack.sql.proto.ColumnInfo; import java.util.ArrayList; import java.util.List; From 69481b4059353d197586e3092e667e45f208f35c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 14 May 2018 18:27:36 -0400 Subject: [PATCH 10/74] LLRest: Add equals and hashcode tests for Request (#30584) Adds tests for the `Request` object's equals and hashcode to remove a `TODO` and because we use the `equals` method in other testing. --- .../elasticsearch/client/RequestTests.java | 102 +++++++++++++++++- 1 file changed, 101 insertions(+), 1 deletion(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestTests.java index b83115a5341dd..6625c389c6be8 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestTests.java @@ -26,13 +26,16 @@ import org.apache.http.Header; import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; @@ -151,6 +154,103 @@ public void testSetHeaders() { assertArrayEquals(headers, request.getHeaders()); } - // TODO equals and hashcode + public void testEqualsAndHashCode() { + Request request = randomRequest(); + assertEquals(request, request); + Request copy = copy(request); + assertEquals(request, copy); + assertEquals(copy, request); + assertEquals(request.hashCode(), copy.hashCode()); + + Request mutant = mutate(request); + assertNotEquals(request, mutant); + assertNotEquals(mutant, request); + } + + private Request randomRequest() { + Request request = new Request( + randomFrom(new String[] {"GET", "PUT", "DELETE", "POST", "HEAD", "OPTIONS"}), + randomAsciiAlphanumOfLength(5)); + + int parameterCount = between(0, 5); + for (int i = 0; i < parameterCount; i++) { + request.addParameter(randomAsciiAlphanumOfLength(i), randomAsciiLettersOfLength(3)); + } + + if (randomBoolean()) { + if (randomBoolean()) { + 
request.setJsonEntity(randomAsciiAlphanumOfLength(10)); + } else { + request.setEntity(randomFrom(new HttpEntity[] { + new StringEntity(randomAsciiAlphanumOfLength(10), ContentType.APPLICATION_JSON), + new NStringEntity(randomAsciiAlphanumOfLength(10), ContentType.APPLICATION_JSON), + new ByteArrayEntity(randomBytesOfLength(40), ContentType.APPLICATION_JSON) + })); + } + } + + if (randomBoolean()) { + int headerCount = between(1, 5); + Header[] headers = new Header[headerCount]; + for (int i = 0; i < headerCount; i++) { + headers[i] = new BasicHeader(randomAsciiAlphanumOfLength(3), randomAsciiAlphanumOfLength(3)); + } + request.setHeaders(headers); + } + + if (randomBoolean()) { + request.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(1)); + } + + return request; + } + + private Request copy(Request request) { + Request copy = new Request(request.getMethod(), request.getEndpoint()); + copyMutables(request, copy); + return copy; + } + + private Request mutate(Request request) { + if (randomBoolean()) { + // Mutate request or method but keep everything else constant + Request mutant = randomBoolean() + ? new Request(request.getMethod() + "m", request.getEndpoint()) + : new Request(request.getMethod(), request.getEndpoint() + "m"); + copyMutables(request, mutant); + return mutant; + } + Request mutant = copy(request); + int mutationType = between(0, 3); + switch (mutationType) { + case 0: + mutant.addParameter(randomAsciiAlphanumOfLength(mutant.getParameters().size() + 4), "extra"); + return mutant; + case 1: + mutant.setJsonEntity("mutant"); // randomRequest can't produce this value + return mutant; + case 2: + if (mutant.getHeaders().length > 0) { + mutant.setHeaders(new Header[0]); + } else { + mutant.setHeaders(new BasicHeader("extra", "m")); + } + return mutant; + case 3: + mutant.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(5)); + return mutant; + default: + throw new UnsupportedOperationException("Unknown mutation type [" + mutationType + "]"); + } + } + + private void copyMutables(Request from, Request to) { + for (Map.Entry param : from.getParameters().entrySet()) { + to.addParameter(param.getKey(), param.getValue()); + } + to.setEntity(from.getEntity()); + to.setHeaders(from.getHeaders()); + to.setHttpAsyncResponseConsumerFactory(from.getHttpAsyncResponseConsumerFactory()); + } } From 7f47ff9fcd1b6af8128baffaacd99192d5033aa8 Mon Sep 17 00:00:00 2001 From: lcawl Date: Mon, 14 May 2018 15:35:02 -0700 Subject: [PATCH 11/74] [DOCS] Fixes title capitalization in security content --- x-pack/docs/en/security/auditing.asciidoc | 51 ++++++++++--------- .../active-directory-realm.asciidoc | 1 + .../authentication/anonymous-access.asciidoc | 3 +- .../authentication/built-in-users.asciidoc | 1 + .../authentication/custom-realm.asciidoc | 7 +-- .../authentication/file-realm.asciidoc | 1 + .../authentication/internal-users.asciidoc | 1 + .../authentication/ldap-realm.asciidoc | 1 + .../authentication/native-realm.asciidoc | 1 + .../security/authentication/overview.asciidoc | 1 + .../authentication/pki-realm.asciidoc | 1 + .../security/authentication/realms.asciidoc | 1 + .../authentication/saml-guide.asciidoc | 3 +- .../authentication/saml-realm.asciidoc | 1 + .../authentication/user-cache.asciidoc | 5 +- .../authorization/alias-privileges.asciidoc | 3 +- .../custom-roles-provider.asciidoc | 7 +-- ...field-and-document-access-control.asciidoc | 13 ++--- .../authorization/mapping-roles.asciidoc | 13 ++--- .../overview.asciidoc} | 25 
++++----- .../authorization/run-as-privilege.asciidoc | 3 +- .../ccs-clients-integrations.asciidoc | 3 +- .../docs/en/security/getting-started.asciidoc | 3 +- .../en/security/how-security-works.asciidoc | 5 +- x-pack/docs/en/security/index.asciidoc | 5 +- x-pack/docs/en/security/limitations.asciidoc | 1 + x-pack/docs/en/security/reference.asciidoc | 1 + .../security/securing-communications.asciidoc | 5 +- .../docs/en/security/troubleshooting.asciidoc | 1 + .../en/security/using-ip-filtering.asciidoc | 7 +-- 30 files changed, 102 insertions(+), 72 deletions(-) rename x-pack/docs/en/security/{authorization.asciidoc => authorization/overview.asciidoc} (96%) diff --git a/x-pack/docs/en/security/auditing.asciidoc b/x-pack/docs/en/security/auditing.asciidoc index 6cd31d076f94f..ee508a5ac8d2d 100644 --- a/x-pack/docs/en/security/auditing.asciidoc +++ b/x-pack/docs/en/security/auditing.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[auditing]] -== Auditing Security Events +== Auditing security events You can enable auditing to keep track of security-related events such as authentication failures and refused connections. Logging these events enables you @@ -40,7 +41,7 @@ events are pushed to the index by setting [float] [[audit-event-types]] -=== Audit Event Types +=== Audit event types Each request may generate multiple audit events. The following is a list of the events that can be generated: @@ -81,11 +82,11 @@ The following is a list of the events that can be generated: [float] [[audit-event-attributes]] -=== Audit Event Attributes +=== Audit event attributes The following table shows the common attributes that can be associated with every event. -.Common Attributes +.Common attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -103,7 +104,7 @@ The following table shows the common attributes that can be associated with ever The following tables show the attributes that can be associated with each type of event. The log level determines which attributes are included in a log entry. -.REST anonymous_access_denied Attributes +.REST anonymous_access_denied attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -112,7 +113,7 @@ The log level determines which attributes are included in a log entry. | `request_body` | The body of the request, if enabled. |====== -.REST authentication_success Attributes +.REST authentication_success attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -123,7 +124,7 @@ The log level determines which attributes are included in a log entry. | `request_body` | The body of the request, if enabled. |====== -.REST authentication_failed Attributes +.REST authentication_failed attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -133,7 +134,7 @@ The log level determines which attributes are included in a log entry. | `request_body` | The body of the request, if enabled. |====== -.REST realm_authentication_failed Attributes +.REST realm_authentication_failed attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -146,7 +147,7 @@ The log level determines which attributes are included in a log entry. consulted realm. |====== -.Transport anonymous_access_denied Attributes +.Transport anonymous_access_denied attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -161,7 +162,7 @@ The log level determines which attributes are included in a log entry. pertains to (when applicable). 
|====== -.Transport authentication_success Attributes +.Transport authentication_success attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -176,7 +177,7 @@ The log level determines which attributes are included in a log entry. | `request` | The type of request that was executed. |====== -.Transport authentication_failed Attributes +.Transport authentication_failed attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -192,7 +193,7 @@ The log level determines which attributes are included in a log entry. pertains to (when applicable). |====== -.Transport realm_authentication_failed Attributes +.Transport realm_authentication_failed attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -211,7 +212,7 @@ The log level determines which attributes are included in a log entry. consulted realm. |====== -.Transport access_granted Attributes +.Transport access_granted attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -228,7 +229,7 @@ The log level determines which attributes are included in a log entry. pertains to (when applicable). |====== -.Transport access_denied Attributes +.Transport access_denied attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -245,7 +246,7 @@ The log level determines which attributes are included in a log entry. relates to (when applicable). |====== -.Transport tampered_request Attributes +.Transport tampered_request attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -261,7 +262,7 @@ The log level determines which attributes are included in a log entry. pertains to (when applicable). |====== -.IP Filter connection_granted Attributes +.IP filter connection_granted attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -271,7 +272,7 @@ The log level determines which attributes are included in a log entry. the request. |====== -.IP Filter connection_denied Attributes +.IP filter connection_denied attributes [cols="2,7",options="header"] |====== | Attribute | Description @@ -283,14 +284,14 @@ The log level determines which attributes are included in a log entry. [float] [[audit-log-output]] -=== Logfile Audit Output +=== Logfile audit output The `logfile` audit output is the default output for auditing. It writes data to the `_access.log` file in the logs directory. [float] [[audit-log-entry-format]] -=== Log Entry Format +=== Log entry format The format of a log entry is: @@ -318,7 +319,7 @@ The format of a log entry is: [float] [[audit-log-settings]] -=== Logfile Output Settings +=== Logfile output settings The events and some other information about what gets logged can be controlled using settings in the `elasticsearch.yml` file. See @@ -336,7 +337,7 @@ file located in `CONFIG_DIR`. By default, audit information is appended to the [float] [[audit-log-ignore-policy]] -=== Logfile Audit Events Ignore Policies +=== Logfile audit events ignore policies The comprehensive audit trail is necessary to ensure accountability. It offers tremendous value during incident response and can even be required for demonstrating compliance. @@ -414,7 +415,7 @@ xpack.security.audit.logfile.events.ignore_filters: [float] [[audit-index]] -=== Index Audit Output +=== Index audit output In addition to logging to a file, you can store audit logs in Elasticsearch rolling indices. 
These indices can be either on the same cluster, or on a @@ -429,13 +430,13 @@ xpack.security.audit.outputs: [ index, logfile ] ---------------------------- For more configuration options, see -{ref}/auditing-settings.html#index-audit-settings[Audit Log Indexing Configuration Settings]. +{ref}/auditing-settings.html#index-audit-settings[Audit log indexing configuration settings]. IMPORTANT: No filtering is performed when auditing, so sensitive data may be audited in plain text when including the request body in audit events. [float] -==== Audit Index Settings +==== Audit index settings You can also configure settings for the indices that the events are stored in. These settings are configured in the `xpack.security.audit.index.settings` namespace @@ -451,7 +452,7 @@ xpack.security.audit.index.settings: ---------------------------- [float] -==== Forwarding Audit Logs to a Remote Cluster +==== Forwarding audit logs to a remote cluster To index audit events to a remote Elasticsearch cluster, you configure the following `xpack.security.audit.index.client` settings: diff --git a/x-pack/docs/en/security/authentication/active-directory-realm.asciidoc b/x-pack/docs/en/security/authentication/active-directory-realm.asciidoc index 2069176172e31..c0461f4f33885 100644 --- a/x-pack/docs/en/security/authentication/active-directory-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/active-directory-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[active-directory-realm]] === Active Directory user authentication diff --git a/x-pack/docs/en/security/authentication/anonymous-access.asciidoc b/x-pack/docs/en/security/authentication/anonymous-access.asciidoc index c95328e99a3eb..983348f8cf584 100644 --- a/x-pack/docs/en/security/authentication/anonymous-access.asciidoc +++ b/x-pack/docs/en/security/authentication/anonymous-access.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[anonymous-access]] -=== Enabling Anonymous Access +=== Enabling anonymous access Incoming requests are considered to be _anonymous_ if no authentication token can be extracted from the incoming request. By default, anonymous requests are rejected and an authentication error is returned (status code `401`). diff --git a/x-pack/docs/en/security/authentication/built-in-users.asciidoc b/x-pack/docs/en/security/authentication/built-in-users.asciidoc index 2400643755abd..74fc9f1e1db12 100644 --- a/x-pack/docs/en/security/authentication/built-in-users.asciidoc +++ b/x-pack/docs/en/security/authentication/built-in-users.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[built-in-users]] === Built-in users diff --git a/x-pack/docs/en/security/authentication/custom-realm.asciidoc b/x-pack/docs/en/security/authentication/custom-realm.asciidoc index a7df6f5ff865b..8e0114b7454c6 100644 --- a/x-pack/docs/en/security/authentication/custom-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/custom-realm.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[custom-realms]] -=== Integrating with Other Authentication Systems +=== Integrating with other authentication systems If you are using an authentication system that is not supported out-of-the-box by {security}, you can create a custom realm to interact with it to authenticate @@ -7,7 +8,7 @@ users. You implement a custom realm as an SPI loaded security extension as part of an ordinary elasticsearch plugin. 
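For readers unfamiliar with the SPI mechanism that the paragraph above refers to, the following is a minimal, self-contained Java sketch of how `java.util.ServiceLoader` discovers implementations on the classpath. The `CustomRealmFactory` interface here is hypothetical; it only stands in for the real {security} extension point, which lives in the X-Pack code base.

[source,java]
----------------------------------------------------
import java.util.ServiceLoader;

// Hypothetical extension point, used only for this illustration; the real
// interface is part of the X-Pack security SecurityExtension machinery.
interface CustomRealmFactory {
    String realmType();
}

public class SpiDiscoveryDemo {
    public static void main(String[] args) {
        // ServiceLoader reads META-INF/services/<fully-qualified-interface-name>
        // entries from each jar on the classpath and instantiates the classes
        // listed there. This is the JDK mechanism meant by "SPI loaded
        // security extension" above.
        ServiceLoader<CustomRealmFactory> loader = ServiceLoader.load(CustomRealmFactory.class);
        for (CustomRealmFactory factory : loader) {
            System.out.println("discovered realm type: " + factory.realmType());
        }
    }
}
----------------------------------------------------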
[[implementing-custom-realm]] -==== Implementing a Custom Realm +==== Implementing a custom realm Sample code that illustrates the structure and implementation of a custom realm is provided in the https://github.com/elastic/shield-custom-realm-example[custom-realm-example] @@ -70,7 +71,7 @@ part of the `SecurityExtension` interface, it's available as part of the elastic . Bundle all in a single zip file. [[using-custom-realm]] -==== Using a Custom Realm to Authenticate Users +==== Using a custom realm to authenticate users To use a custom realm: diff --git a/x-pack/docs/en/security/authentication/file-realm.asciidoc b/x-pack/docs/en/security/authentication/file-realm.asciidoc index cf6f5cacd1c17..1161778bb801c 100644 --- a/x-pack/docs/en/security/authentication/file-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/file-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[file-realm]] === File-based user authentication diff --git a/x-pack/docs/en/security/authentication/internal-users.asciidoc b/x-pack/docs/en/security/authentication/internal-users.asciidoc index 53468363dc8d0..77571a53a56f3 100644 --- a/x-pack/docs/en/security/authentication/internal-users.asciidoc +++ b/x-pack/docs/en/security/authentication/internal-users.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[internal-users]] === Internal users diff --git a/x-pack/docs/en/security/authentication/ldap-realm.asciidoc b/x-pack/docs/en/security/authentication/ldap-realm.asciidoc index 205c18429bc98..02d0162a9c9f9 100644 --- a/x-pack/docs/en/security/authentication/ldap-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/ldap-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[ldap-realm]] === LDAP user authentication diff --git a/x-pack/docs/en/security/authentication/native-realm.asciidoc b/x-pack/docs/en/security/authentication/native-realm.asciidoc index 3643e42e02a1c..f7b514b81449f 100644 --- a/x-pack/docs/en/security/authentication/native-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/native-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[native-realm]] === Native user authentication diff --git a/x-pack/docs/en/security/authentication/overview.asciidoc b/x-pack/docs/en/security/authentication/overview.asciidoc index ada5453c7a765..da5f6a4ea3cea 100644 --- a/x-pack/docs/en/security/authentication/overview.asciidoc +++ b/x-pack/docs/en/security/authentication/overview.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[setting-up-authentication]] == User authentication diff --git a/x-pack/docs/en/security/authentication/pki-realm.asciidoc b/x-pack/docs/en/security/authentication/pki-realm.asciidoc index 4fc91717f9342..6ce9b0e0770a4 100644 --- a/x-pack/docs/en/security/authentication/pki-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/pki-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[pki-realm]] === PKI user authentication diff --git a/x-pack/docs/en/security/authentication/realms.asciidoc b/x-pack/docs/en/security/authentication/realms.asciidoc index 7bd48c5c8f017..ec0945b5a113c 100644 --- a/x-pack/docs/en/security/authentication/realms.asciidoc +++ b/x-pack/docs/en/security/authentication/realms.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[realms]] === Realms diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index d1f7961fecbf3..740f51c877ded 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -1,6 +1,7 @@ +[role="xpack"] 
[[saml-guide]] -== Configuring SAML single-sign-on on the Elastic Stack +== Configuring SAML single-sign-on on the {stack} The Elastic Stack supports SAML single-sign-on (SSO) into {kib}, using {es} as a backend service. In SAML terminology, the Elastic Stack is operating as a diff --git a/x-pack/docs/en/security/authentication/saml-realm.asciidoc b/x-pack/docs/en/security/authentication/saml-realm.asciidoc index c05f82d341b03..a55ae270a19a1 100644 --- a/x-pack/docs/en/security/authentication/saml-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-realm.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[saml-realm]] === SAML authentication {security} supports user authentication using SAML Single Sign On. diff --git a/x-pack/docs/en/security/authentication/user-cache.asciidoc b/x-pack/docs/en/security/authentication/user-cache.asciidoc index ba2b363a843ed..36af070bf067b 100644 --- a/x-pack/docs/en/security/authentication/user-cache.asciidoc +++ b/x-pack/docs/en/security/authentication/user-cache.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[controlling-user-cache]] -=== Controlling the User Cache +=== Controlling the user cache User credentials are cached in memory on each node to avoid connecting to a remote authentication service or hitting the disk for every incoming request. @@ -34,7 +35,7 @@ setting the `cache_hash_algo` setting to any of the following: |======================= [[cache-eviction-api]] -==== Evicting Users from the Cache +==== Evicting users from the cache {security} exposes a {ref}/security-api-clear-cache.html[Clear Cache API] you can use diff --git a/x-pack/docs/en/security/authorization/alias-privileges.asciidoc b/x-pack/docs/en/security/authorization/alias-privileges.asciidoc index 6916e2ab2ca30..05c9359df5aeb 100644 --- a/x-pack/docs/en/security/authorization/alias-privileges.asciidoc +++ b/x-pack/docs/en/security/authorization/alias-privileges.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[securing-aliases]] -=== Granting Privileges for Indices & Aliases +=== Granting privileges for indices and aliases Elasticsearch allows to execute operations against {ref}/indices-aliases.html[index aliases], which are effectively virtual indices. An alias points to one or more indices, diff --git a/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc b/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc index 9056467ced9f9..c218fa04f8ec7 100644 --- a/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc +++ b/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[custom-roles-provider]] -=== Custom Roles Provider Extension +=== Custom roles provider extension If you need to retrieve user roles from a system not supported out-of-the-box by {security}, you can create a custom roles provider to retrieve and resolve @@ -7,7 +8,7 @@ roles. You implement a custom roles provider as an SPI loaded security extension as part of an ordinary elasticsearch plugin. [[implementing-custom-roles-provider]] -==== Implementing a Custom Roles Provider +==== Implementing a custom roles provider To create a custom roles provider: @@ -62,7 +63,7 @@ part of the `SecurityExtension` interface, it's available as part of the elastic . Bundle all in a single zip file. 
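Conceptually, a roles provider is a lookup from a username to a set of role names, usually with a cache in front of the remote system. The sketch below is an illustrative model only; the class name and shape are invented for this example and are not the X-Pack `SecurityExtension` API.

[source,java]
----------------------------------------------------
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Illustrative model: a roles provider as a cached username -> roles lookup.
public class CachingRolesProvider {
    private final Function<String, Set<String>> remoteLookup;
    private final Map<String, Set<String>> cache = new ConcurrentHashMap<>();

    public CachingRolesProvider(Function<String, Set<String>> remoteLookup) {
        this.remoteLookup = remoteLookup;
    }

    public Set<String> rolesFor(String username) {
        // computeIfAbsent keeps repeated resolutions for the same user off the wire
        return cache.computeIfAbsent(username, remoteLookup);
    }
}
----------------------------------------------------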
[[using-custom-roles-provider]] -==== Using a Custom Roles Provider to Resolve Roles +==== Using a custom roles provider to resolve roles To use a custom roles provider: diff --git a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc index 88d0e157ca052..a1aa44895c6a6 100644 --- a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc +++ b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[field-and-document-access-control]] -=== Setting Up Field and Document Level Security +=== Setting up field and document level security You can control access to data within an index by adding field and document level security permissions to a role. Field level security permissions restrict access @@ -23,7 +24,7 @@ document level permissions per index. See <>. ===================================================================== [[field-level-security]] -==== Field Level Security +==== Field level security To enable field level security, specify the fields that each role can access as part of the indices permissions in a role definition. Field level security is @@ -235,7 +236,7 @@ The resulting permission is equal to: [[document-level-security]] -==== Document Level Security +==== Document level security Document level security restricts the documents that users have read access to. To enable document level security, specify a query that matches all the @@ -292,7 +293,7 @@ For example, the following role grants read access only to the documents whose NOTE: `query` also accepts queries written as string values. [[templating-role-query]] -===== Templating a Role Query +===== Templating a role query You can use Mustache templates in a role query to insert the username of the current authenticated user into the role. Like other places in {es} that support @@ -358,7 +359,7 @@ based on the `group.id` field in your documents: -------------------------------------------------- [[set-security-user-processor]] -===== Set Security User Ingest Processor +===== Set security user ingest processor If an index is shared by many small users it makes sense to put all these users into the same index. Having a dedicated index or shard per user is wasteful. @@ -416,7 +417,7 @@ to the `user` field for all documents that are processed by this pipeline: -------------------------------------------------- [[multiple-roles-dls-fls]] -==== Multiple Roles with Document and Field Level Security +==== Multiple roles with document and field level security A user can have many roles and each role can define different permissions on the same index. It is important to understand the behavior of document and field diff --git a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc index fba87db978626..cf8373a65f335 100644 --- a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/mapping-roles.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[mapping-roles]] -=== Mapping Users and Groups to Roles +=== Mapping users and groups to roles If you authenticate users with the `native` or `file` realms, you can manage role assignment by using the <> or @@ -24,13 +25,13 @@ you are able to map users to both API-managed roles and file-managed roles (and likewise for file-based role-mappings). 
[[mapping-roles-api]] -==== Using the Role Mapping API +==== Using the role mapping API You can define role-mappings through the {ref}/security-api-role-mapping.html[role mapping API]. [[mapping-roles-file]] -==== Using Role Mapping Files +==== Using role mapping files To use file based role-mappings, you must configure the mappings in a YAML file and copy it to each node in the cluster. Tools like Puppet or Chef can help with @@ -56,10 +57,10 @@ You can change this default behavior by changing the this is a common setting in Elasticsearch, changing its value might effect other schedules in the system. -==== Realm Specific Details +==== Realm specific details [float] [[ldap-role-mapping]] -===== Active Directory and LDAP Realms +===== Active Directory and LDAP realms To specify users and groups in the role mappings, you use their _Distinguished Names_ (DNs). A DN is a string that uniquely identifies the user @@ -113,7 +114,7 @@ PUT _xpack/security/role_mapping/basic_users [float] [[pki-role-mapping]] -===== PKI Realms +===== PKI realms PKI realms support mapping users to roles, but you cannot map groups as the PKI realm has no notion of a group. diff --git a/x-pack/docs/en/security/authorization.asciidoc b/x-pack/docs/en/security/authorization/overview.asciidoc similarity index 96% rename from x-pack/docs/en/security/authorization.asciidoc rename to x-pack/docs/en/security/authorization/overview.asciidoc index ed171415056da..9dc8185db4d34 100644 --- a/x-pack/docs/en/security/authorization.asciidoc +++ b/x-pack/docs/en/security/authorization/overview.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[authorization]] -== Configuring Role-based Access Control +== Configuring role-based access control {security} introduces the concept of _authorization_ to {es}. Authorization is the process of determining whether the user behind an incoming @@ -8,7 +9,7 @@ successfully authenticated and the user behind the request is identified. [[roles]] [float] -=== Roles, Permissions and Privileges +=== Roles, permissions, and privileges The authorization process revolves around the following 5 constructs: @@ -49,7 +50,7 @@ then assign users to the roles. These can be assigned to users in a number of ways depending on the realms by which the users are authenticated. [[built-in-roles]] -=== Built-in Roles +=== Built-in roles {security} applies a default role to all users, including <>. The default role enables users to access @@ -164,7 +165,7 @@ stats. [[defining-roles]] -=== Defining Roles +=== Defining roles A role is defined by the following JSON structure: @@ -276,14 +277,14 @@ see <>. [float] [[roles-management-ui]] -=== Role Management UI +=== Role management UI {security} enables you to easily manage users and roles from within {kib}. To manage roles, log in to {kib} and go to *Management / Elasticsearch / Roles*. [float] [[roles-management-api]] -=== Role Management API +=== Role management API The _Role Management APIs_ enable you to add, update, remove and retrieve roles dynamically. When you use the APIs to manage roles in the `native` realm, the @@ -292,7 +293,7 @@ see {ref}/security-api-roles.html[Role Management APIs]. [float] [[roles-management-file]] -=== File-based Role Management +=== File-based role management Apart from the _Role Management APIs_, roles can also be defined in local `roles.yml` file located in `CONFIG_DIR`. 
This is a YAML file where each @@ -338,12 +339,12 @@ click_admins: {security} continuously monitors the `roles.yml` file and automatically picks up and applies any changes to it. -include::authorization/alias-privileges.asciidoc[] +include::alias-privileges.asciidoc[] -include::authorization/mapping-roles.asciidoc[] +include::mapping-roles.asciidoc[] -include::authorization/field-and-document-access-control.asciidoc[] +include::field-and-document-access-control.asciidoc[] -include::authorization/run-as-privilege.asciidoc[] +include::run-as-privilege.asciidoc[] -include::authorization/custom-roles-provider.asciidoc[] +include::custom-roles-provider.asciidoc[] diff --git a/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc b/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc index e246f2b194281..93d11c0ab2af9 100644 --- a/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc +++ b/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[run-as-privilege]] -=== Submitting Requests on Behalf of Other Users +=== Submitting requests on behalf of other users {security} supports a permission that enables an authenticated user to submit requests on behalf of other users. If your application already authenticates diff --git a/x-pack/docs/en/security/ccs-clients-integrations.asciidoc b/x-pack/docs/en/security/ccs-clients-integrations.asciidoc index e25586dfb371c..cbf4ede328e48 100644 --- a/x-pack/docs/en/security/ccs-clients-integrations.asciidoc +++ b/x-pack/docs/en/security/ccs-clients-integrations.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ccs-clients-integrations]] -== Cross Cluster Search, Clients and Integrations +== Cross cluster search, clients, and integrations When using {ref}/modules-cross-cluster-search.html[Cross Cluster Search] you need to take extra steps to secure communications with the connected diff --git a/x-pack/docs/en/security/getting-started.asciidoc b/x-pack/docs/en/security/getting-started.asciidoc index 8aa35a9428160..b8f1183cddf89 100644 --- a/x-pack/docs/en/security/getting-started.asciidoc +++ b/x-pack/docs/en/security/getting-started.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[security-getting-started]] -== Getting Started with Security +== Getting started with security To secure a cluster, you must enable {security} on every node in the cluster. Basic authentication is enabled by default--to communicate diff --git a/x-pack/docs/en/security/how-security-works.asciidoc b/x-pack/docs/en/security/how-security-works.asciidoc index ae402dfe05eb4..dcc152c2bcaab 100644 --- a/x-pack/docs/en/security/how-security-works.asciidoc +++ b/x-pack/docs/en/security/how-security-works.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[how-security-works]] -== How Security Works +== How security works An Elasticsearch cluster is typically made out of many moving parts. There are the Elasticsearch nodes that form the cluster, and often Logstash instances, @@ -64,7 +65,7 @@ For more information on user authentication see <> [float] -=== Node/Client Authentication and Channel Encryption +=== Node/client authentication and channel encryption {security} supports configuring SSL/TLS for securing the communication channels to, from and within the cluster. 
This support accounts for: diff --git a/x-pack/docs/en/security/index.asciidoc b/x-pack/docs/en/security/index.asciidoc index 188353d01a3fb..d5f970a3fb826 100644 --- a/x-pack/docs/en/security/index.asciidoc +++ b/x-pack/docs/en/security/index.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[xpack-security]] -= Securing the Elastic Stack += Securing the {stack} [partintro] -- @@ -100,7 +101,7 @@ include::how-security-works.asciidoc[] include::authentication/overview.asciidoc[] -include::authorization.asciidoc[] +include::authorization/overview.asciidoc[] include::auditing.asciidoc[] diff --git a/x-pack/docs/en/security/limitations.asciidoc b/x-pack/docs/en/security/limitations.asciidoc index c2616ac6565bd..c127ee3d7967c 100644 --- a/x-pack/docs/en/security/limitations.asciidoc +++ b/x-pack/docs/en/security/limitations.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[security-limitations]] == Security Limitations diff --git a/x-pack/docs/en/security/reference.asciidoc b/x-pack/docs/en/security/reference.asciidoc index 90668651b5d50..21138138cfbf9 100644 --- a/x-pack/docs/en/security/reference.asciidoc +++ b/x-pack/docs/en/security/reference.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[security-reference]] == Reference * <> diff --git a/x-pack/docs/en/security/securing-communications.asciidoc b/x-pack/docs/en/security/securing-communications.asciidoc index e876ce9160b86..ef07f0113cb59 100644 --- a/x-pack/docs/en/security/securing-communications.asciidoc +++ b/x-pack/docs/en/security/securing-communications.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[encrypting-communications]] -== Encrypting Communications +== Encrypting communications Elasticsearch nodes store data that may be confidential. Attacks on the data may come from the network. These attacks could include sniffing of the data, @@ -21,7 +22,7 @@ include::securing-communications/setting-up-ssl.asciidoc[] //TO-DO: These sections can be removed when all links to them are removed. [[ciphers]] -=== Enabling Cipher Suites for Stronger Encryption +=== Enabling cipher suites for stronger encryption See {ref}/ciphers.html[Enabling Cipher Suites for Stronger Encryption]. diff --git a/x-pack/docs/en/security/troubleshooting.asciidoc b/x-pack/docs/en/security/troubleshooting.asciidoc index e805ed07a7dec..c202ed9dbedb0 100644 --- a/x-pack/docs/en/security/troubleshooting.asciidoc +++ b/x-pack/docs/en/security/troubleshooting.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[security-troubleshooting]] == {security} Troubleshooting ++++ diff --git a/x-pack/docs/en/security/using-ip-filtering.asciidoc b/x-pack/docs/en/security/using-ip-filtering.asciidoc index 37beced5a9455..817975c69de9d 100644 --- a/x-pack/docs/en/security/using-ip-filtering.asciidoc +++ b/x-pack/docs/en/security/using-ip-filtering.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ip-filtering]] -== Restricting Connections with IP Filtering +== Restricting connections with IP filtering You can apply IP filtering to application clients, node clients, or transport clients, in addition to other nodes that are attempting to join the cluster. @@ -92,7 +93,7 @@ transport.profiles.client.xpack.security.filter.deny: _all NOTE: When you do not specify a profile, `default` is used automatically. [float] -=== HTTP Filtering +=== HTTP filtering You may want to have different IP filtering for the transport and HTTP protocols. 
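Underneath, each allow/deny rule comes down to matching an address against an IP or CIDR pattern. The sketch below is a generic, self-contained illustration of that arithmetic in Java; it is not the {security} implementation, only the kind of check such a filter performs.

[source,java]
----------------------------------------------------
import java.net.InetAddress;
import java.util.Arrays;

public class CidrMatch {
    // True if the address falls inside the CIDR block, e.g. "192.168.0.0/24".
    static boolean matches(String cidr, InetAddress address) throws Exception {
        String[] parts = cidr.split("/");
        byte[] rule = InetAddress.getByName(parts[0]).getAddress();
        byte[] candidate = address.getAddress();
        int prefix = Integer.parseInt(parts[1]);
        if (rule.length != candidate.length) {
            return false; // an IPv4 rule never matches an IPv6 address, and vice versa
        }
        int fullBytes = prefix / 8;
        if (!Arrays.equals(Arrays.copyOf(rule, fullBytes), Arrays.copyOf(candidate, fullBytes))) {
            return false;
        }
        int remainder = prefix % 8;
        if (remainder == 0) {
            return true;
        }
        int mask = (0xFF << (8 - remainder)) & 0xFF; // top `remainder` bits of the next byte
        return (rule[fullBytes] & mask) == (candidate[fullBytes] & mask);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(matches("192.168.0.0/24", InetAddress.getByName("192.168.0.7"))); // true
        System.out.println(matches("192.168.0.0/24", InetAddress.getByName("192.168.1.7"))); // false
    }
}
----------------------------------------------------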
@@ -106,7 +107,7 @@ xpack.security.http.filter.deny: _all [float] [[dynamic-ip-filtering]] -==== Dynamically updating ip filter settings +==== Dynamically updating IP filter settings In case of running in an environment with highly dynamic IP addresses like cloud based hosting, it is very hard to know the IP addresses upfront when provisioning From 9881bfaea58ef7afbd8d4e77aef83cf855366175 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 14 May 2018 18:40:54 -0400 Subject: [PATCH 12/74] Docs: Document how to rebuild analyzers (#30498) Adds documentation for how to rebuild all the built in analyzers and tests for that documentation using the mechanism added in #29535. Closes #29499 --- .../analyzers/fingerprint-analyzer.asciidoc | 57 ++++++++++++----- .../analyzers/keyword-analyzer.asciidoc | 45 +++++++++++--- .../analyzers/pattern-analyzer.asciidoc | 61 +++++++++++++++---- .../analyzers/simple-analyzer.asciidoc | 42 ++++++++++--- .../analyzers/standard-analyzer.asciidoc | 54 ++++++++++++---- .../analysis/analyzers/stop-analyzer.asciidoc | 58 ++++++++++++++---- .../analyzers/whitespace-analyzer.asciidoc | 42 ++++++++++--- 7 files changed, 284 insertions(+), 75 deletions(-) diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index 53c7d913ad2f1..cc873a4fe89ff 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -9,20 +9,6 @@ Input text is lowercased, normalized to remove extended characters, sorted, deduplicated and concatenated into a single token. If a stopword list is configured, stop words will also be removed. -[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - -Token Filters (in order):: -1. <> -2. <> -3. <> (disabled by default) -4. <> - [float] === Example output @@ -149,3 +135,46 @@ The above example produces the following term: --------------------------- [ consistent godel said sentence yes ] --------------------------- + +[float] +=== Definition + +The `fingerprint` analyzer consists of: + +Tokenizer:: +* <> + +Token Filters (in order):: +* <> +* <> +* <> (disabled by default) +* <> + +If you need to customize the `fingerprint` analyzer beyond the configuration +parameters then you need to recreate it as a `custom` analyzer and modify +it, usually by adding token filters. This would recreate the built-in +`fingerprint` analyzer and you can use it as a starting point for further +customization: + +[source,js] +---------------------------------------------------- +PUT /fingerprint_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_fingerprint": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "asciifolding", + "fingerprint" + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: fingerprint_example, first: fingerprint, second: rebuilt_fingerprint}\nendyaml\n/] diff --git a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc index cc94f3b757e37..954b514ced605 100644 --- a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc @@ -4,14 +4,6 @@ The `keyword` analyzer is a ``noop'' analyzer which returns the entire input string as a single token.
-[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - [float] === Example output @@ -57,3 +49,40 @@ The above sentence would produce the following single term: === Configuration The `keyword` analyzer is not configurable. + +[float] +=== Definition + +The `keyword` analyzer consists of: + +Tokenizer:: +* <> + +If you need to customize the `keyword` analyzer then you need to +recreate it as a `custom` analyzer and modify it, usually by adding +token filters. Usually, you should prefer the +<> when you want strings that are not split +into tokens, but just in case you need it, this would recreate the +built-in `keyword` analyzer and you can use it as a starting point +for further customization: + +[source,js] +---------------------------------------------------- +PUT /keyword_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_keyword": { + "tokenizer": "keyword", + "filter": [ <1> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: keyword_example, first: keyword, second: rebuilt_keyword}\nendyaml\n/] +<1> You'd add any token filters here. diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 64ab3999ef9a9..027f37280a67d 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -19,19 +19,6 @@ Read more about http://www.regular-expressions.info/catastrophic.html[pathologic ======================================== - - -[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - -Token Filters:: -* <> -* <> (disabled by default) - [float] === Example output @@ -378,3 +365,51 @@ The regex above is easier to understand as: [\p{L}&&[^\p{Lu}]] # then lower case ) -------------------------------------------------- + +[float] +=== Definition + +The `pattern` analyzer consists of: + +Tokenizer:: +* <> + +Token Filters:: +* <> +* <> (disabled by default) + +If you need to customize the `pattern` analyzer beyond the configuration +parameters then you need to recreate it as a `custom` analyzer and modify +it, usually by adding token filters. This would recreate the built-in +`pattern` analyzer and you can use it as a starting point for further +customization: + +[source,js] +---------------------------------------------------- +PUT /pattern_example +{ + "settings": { + "analysis": { + "tokenizer": { + "split_on_non_word": { + "type": "pattern", + "pattern": "\\W+" <1> + } + }, + "analyzer": { + "rebuilt_pattern": { + "tokenizer": "split_on_non_word", + "filter": [ + "lowercase" <2> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: pattern_example, first: pattern, second: rebuilt_pattern}\nendyaml\n/] +<1> The default pattern is `\W+` which splits on non-word characters +and this is where you'd change it. +<2> You'd add other token filters after `lowercase`. diff --git a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc index a57c30d8dd622..d82655d9bd8e1 100644 --- a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc @@ -4,14 +4,6 @@ The `simple` analyzer breaks text into terms whenever it encounters a character which is not a letter. All terms are lower cased.
-[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - [float] === Example output @@ -127,3 +119,37 @@ The above sentence would produce the following terms: === Configuration The `simple` analyzer is not configurable. + +[float] +=== Definition + +The `simple` analyzer consists of: + +Tokenizer:: +* <> + +If you need to customize the `simple` analyzer then you need to recreate +it as a `custom` analyzer and modify it, usually by adding token filters. +This would recreate the built-in `simple` analyzer and you can use it as +a starting point for further customization: + +[source,js] +---------------------------------------------------- +PUT /simple_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_simple": { + "tokenizer": "lowercase", + "filter": [ <1> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: simple_example, first: simple, second: rebuilt_simple}\nendyaml\n/] +<1> You'd add any token filters here. diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc index eacbb1c3cad99..20aa072066b5f 100644 --- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc @@ -7,19 +7,6 @@ Segmentation algorithm, as specified in http://unicode.org/reports/tr29/[Unicode Standard Annex #29]) and works well for most languages. -[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - -Token Filters:: -* <> -* <> -* <> (disabled by default) - [float] === Example output @@ -276,3 +263,44 @@ The above example produces the following terms: --------------------------- [ 2, quick, brown, foxes, jumpe, d, over, lazy, dog's, bone ] --------------------------- + +[float] +=== Definition + +The `standard` analyzer consists of: + +Tokenizer:: +* <> + +Token Filters:: +* <> +* <> +* <> (disabled by default) + +If you need to customize the `standard` analyzer beyond the configuration +parameters then you need to recreate it as a `custom` analyzer and modify +it, usually by adding token filters. This would recreate the built-in +`standard` analyzer and you can use it as a starting point: + +[source,js] +---------------------------------------------------- +PUT /standard_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_standard": { + "tokenizer": "standard", + "filter": [ + "standard", + "lowercase" <1> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: standard_example, first: standard, second: rebuilt_standard}\nendyaml\n/] +<1> You'd add any token filters after `lowercase`.
diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc index eacc7e106e799..1b84797d94761 100644 --- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -5,17 +5,6 @@ The `stop` analyzer is the same as the <> - -Token filters:: -* <> - [float] === Example output @@ -239,3 +228,50 @@ The above example produces the following terms: --------------------------- [ quick, brown, foxes, jumped, lazy, dog, s, bone ] --------------------------- + +[float] +=== Definition + +It consists of: + +Tokenizer:: +* <> + +Token filters:: +* <> + +If you need to customize the `stop` analyzer beyond the configuration +parameters then you need to recreate it as a `custom` analyzer and modify +it, usually by adding token filters. This would recreate the built-in +`stop` analyzer and you can use it as a starting point for further +customization: + +[source,js] +---------------------------------------------------- +PUT /stop_example +{ + "settings": { + "analysis": { + "filter": { + "english_stop": { + "type": "stop", + "stopwords": "_english_" <1> + } + }, + "analyzer": { + "rebuilt_stop": { + "tokenizer": "lowercase", + "filter": [ + "english_stop" <2> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: stop_example, first: stop, second: rebuilt_stop}\nendyaml\n/] +<1> The default stopwords can be overridden with the `stopwords` + or `stopwords_path` parameters. +<2> You'd add any token filters after `english_stop`. diff --git a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc index f95e5c6e4ab65..31ba8d9ce8f24 100644 --- a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc @@ -4,14 +4,6 @@ The `whitespace` analyzer breaks text into terms whenever it encounters a whitespace character. -[float] -=== Definition - -It consists of: - -Tokenizer:: -* <> - [float] === Example output @@ -120,3 +112,37 @@ The above sentence would produce the following terms: === Configuration The `whitespace` analyzer is not configurable. + +[float] +=== Definition + +It consists of: + +Tokenizer:: +* <> + +If you need to customize the `whitespace` analyzer then you need to +recreate it as a `custom` analyzer and modify it, usually by adding +token filters. This would recreate the built-in `whitespace` analyzer +and you can use it as a starting point for further customization: + +[source,js] +---------------------------------------------------- +PUT /whitespace_example +{ + "settings": { + "analysis": { + "analyzer": { + "rebuilt_whitespace": { + "tokenizer": "whitespace", + "filter": [ <1> + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +// TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: whitespace_example, first: whitespace, second: rebuilt_whitespace}\nendyaml\n/] +<1> You'd add any token filters here. 
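The `compare_analyzers` directives in the snippets above are consumed by the docs test harness added in #29535. Outside that harness you can make the same comparison by hand with the `_analyze` API. The following rough sketch uses the low-level REST client; it assumes the `standard_example` index from the standard-analyzer snippet exists and that the client exposes the `performRequest(Request)` variant.

[source,java]
----------------------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class CompareAnalyzers {
    private static String analyze(RestClient client, String index, String analyzer, String text) throws Exception {
        Request request = new Request("GET", "/" + index + "/_analyze");
        request.setJsonEntity("{ \"analyzer\": \"" + analyzer + "\", \"text\": \"" + text + "\" }");
        Response response = client.performRequest(request);
        return EntityUtils.toString(response.getEntity());
    }

    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            String text = "The 2 QUICK Brown-Foxes.";
            // The raw responses include tokens, offsets and positions, so string
            // equality is a strict check that both analyzers behave identically.
            String builtIn = analyze(client, "standard_example", "standard", text);
            String rebuilt = analyze(client, "standard_example", "rebuilt_standard", text);
            System.out.println(builtIn.equals(rebuilt) ? "token streams match" : "token streams differ");
        }
    }
}
----------------------------------------------------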
From 21d67d1bd7a2d2523dec1ec6f3997d49498ad646 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 14 May 2018 15:49:00 -0700 Subject: [PATCH 13/74] [DOCS] Adds release highlight pages (#30590) --- docs/reference/index-shared4.asciidoc | 2 ++ .../release-notes/highlights-7.0.0.asciidoc | 9 +++++++++ docs/reference/release-notes/highlights.asciidoc | 13 +++++++++++++ 3 files changed, 24 insertions(+) create mode 100644 docs/reference/release-notes/highlights-7.0.0.asciidoc create mode 100644 docs/reference/release-notes/highlights.asciidoc diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc index 5e6ebc8a5a20c..3dfb3b641890f 100644 --- a/docs/reference/index-shared4.asciidoc +++ b/docs/reference/index-shared4.asciidoc @@ -5,4 +5,6 @@ include::testing.asciidoc[] include::glossary.asciidoc[] +include::release-notes/highlights.asciidoc[] + include::{docdir}/../CHANGELOG.asciidoc[] \ No newline at end of file diff --git a/docs/reference/release-notes/highlights-7.0.0.asciidoc b/docs/reference/release-notes/highlights-7.0.0.asciidoc new file mode 100644 index 0000000000000..1ea3d3fa3291e --- /dev/null +++ b/docs/reference/release-notes/highlights-7.0.0.asciidoc @@ -0,0 +1,9 @@ +[[release-highlights-7.0.0]] +== 7.0.0 release highlights +++++ +7.0.0 +++++ + +coming[7.0.0] + +See also <> and <>. diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc new file mode 100644 index 0000000000000..1223e9a685a27 --- /dev/null +++ b/docs/reference/release-notes/highlights.asciidoc @@ -0,0 +1,13 @@ +[[release-highlights]] += {es} Release Highlights + +[partintro] +-- +This section summarizes the most important changes in each release. For the +full list, see <> and <>. + +* <> + +-- + +include::highlights-7.0.0.asciidoc[] \ No newline at end of file From 15790e1b56b4fd34c93223236e475b72317b32c6 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 15 May 2018 02:14:35 +0300 Subject: [PATCH 14/74] Silence IndexUpgradeIT test failures. (#30430) --- .../java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index ef5c3acc3d238..9f1fb95ed4835 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.upgrade; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.search.SearchResponse; @@ -30,6 +31,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30430") public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { @Before From 6517ac98eb060baa63fe1e843f8526a117fcdcae Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 15 May 2018 09:57:34 +1000 Subject: [PATCH 15/74] Fail if reading from closed KeyStoreWrapper (#30394) In #28255 the implementation of the elasticsearch.keystore was changed to no longer be built on top of a PKCS#12 keystore. 
A side effect of that change was that calling getString or getFile on a closed KeyStoreWrapper ceased to throw an exception, and would instead return a value consisting of all 0 bytes. This change restores the previous behaviour as closely as possible. It is possible to retrieve the _keys_ from a closed keystore, but any attempt to get or set the entries will throw an IllegalStateException. --- .../common/settings/KeyStoreWrapper.java | 42 ++++++++++++------- .../common/settings/KeyStoreWrapperTests.java | 15 +++++++ 2 files changed, 43 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 04bbb9279dab5..f47760491f8d5 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -158,6 +158,7 @@ private static class Entry { /** The decrypted secret data. See {@link #decrypt(char[])}. */ private final SetOnce> entries = new SetOnce<>(); + private volatile boolean closed; private KeyStoreWrapper(int formatVersion, boolean hasPassword, byte[] dataBytes) { this.formatVersion = formatVersion; @@ -448,8 +449,8 @@ private void decryptLegacyEntries() throws GeneralSecurityException, IOException } /** Write the keystore to the given config directory. */ - public void save(Path configDir, char[] password) throws Exception { - assert isLoaded(); + public synchronized void save(Path configDir, char[] password) throws Exception { + ensureOpen(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); // write to tmp file first, then overwrite @@ -500,16 +501,22 @@ public void save(Path configDir, char[] password) throws Exception { } } + /** + * It is possible to retrieve the setting names even if the keystore is closed. + * This allows {@link SecureSetting} to correctly determine that a entry exists even though it cannot be read. Thus attempting to + * read a secure setting after the keystore is closed will generate a "keystore is closed" exception rather than using the fallback + * setting. + */ @Override public Set getSettingNames() { - assert isLoaded(); + assert entries.get() != null : "Keystore is not loaded"; return entries.get().keySet(); } // TODO: make settings accessible only to code that registered the setting @Override - public SecureString getString(String setting) { - assert isLoaded(); + public synchronized SecureString getString(String setting) { + ensureOpen(); Entry entry = entries.get().get(setting); if (entry == null || entry.type != EntryType.STRING) { throw new IllegalArgumentException("Secret setting " + setting + " is not a string"); @@ -520,13 +527,12 @@ public SecureString getString(String setting) { } @Override - public InputStream getFile(String setting) { - assert isLoaded(); + public synchronized InputStream getFile(String setting) { + ensureOpen(); Entry entry = entries.get().get(setting); if (entry == null || entry.type != EntryType.FILE) { throw new IllegalArgumentException("Secret setting " + setting + " is not a file"); } - return new ByteArrayInputStream(entry.bytes); } @@ -543,8 +549,8 @@ public static void validateSettingName(String setting) { } /** Set a string setting. 
*/ - void setString(String setting, char[] value) { - assert isLoaded(); + synchronized void setString(String setting, char[] value) { + ensureOpen(); validateSettingName(setting); ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(value)); @@ -556,8 +562,8 @@ void setString(String setting, char[] value) { } /** Set a file setting. */ - void setFile(String setting, byte[] bytes) { - assert isLoaded(); + synchronized void setFile(String setting, byte[] bytes) { + ensureOpen(); validateSettingName(setting); Entry oldEntry = entries.get().put(setting, new Entry(EntryType.FILE, Arrays.copyOf(bytes, bytes.length))); @@ -568,15 +574,23 @@ void setFile(String setting, byte[] bytes) { /** Remove the given setting from the keystore. */ void remove(String setting) { - assert isLoaded(); + ensureOpen(); Entry oldEntry = entries.get().remove(setting); if (oldEntry != null) { Arrays.fill(oldEntry.bytes, (byte)0); } } + private void ensureOpen() { + if (closed) { + throw new IllegalStateException("Keystore is closed"); + } + assert isLoaded() : "Keystore is not loaded"; + } + @Override - public void close() { + public synchronized void close() { + this.closed = true; for (Entry entry : entries.get().values()) { Arrays.fill(entry.bytes, (byte)0); } diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index e22836087367c..849841943ecc6 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -48,11 +48,13 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.instanceOf; public class KeyStoreWrapperTests extends ESTestCase { @@ -97,6 +99,19 @@ public void testCreate() throws Exception { assertTrue(keystore.getSettingNames().contains(KeyStoreWrapper.SEED_SETTING.getKey())); } + public void testCannotReadStringFromClosedKeystore() throws Exception { + KeyStoreWrapper keystore = KeyStoreWrapper.create(); + assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); + assertThat(keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()), notNullValue()); + + keystore.close(); + + assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); + final IllegalStateException exception = expectThrows(IllegalStateException.class, + () -> keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey())); + assertThat(exception.getMessage(), containsString("closed")); + } + public void testUpgradeNoop() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); From 848f2409264618e09b5a3add95623d33805e29c0 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Mon, 14 May 2018 19:19:53 -0600 Subject: [PATCH 16/74] Fix issue with finishing handshake in ssl driver (#30580) This is fixing an issue that has come up in some builds. In some scenarios I see an assertion failure that we are trying to move to application mode when we are not in handshake mode. 
What I think is happening is that we are in handshake mode and have received
the completed handshake message AND an application message. While reading in
handshake mode, we switch to application mode. However, there is still data to
be consumed, so we attempt to continue to read in handshake mode. This leads
to us attempting to move to application mode again, throwing an assertion.
This commit fixes this by immediately exiting the handshake mode read method
if we are no longer in handshake mode. Additionally, if we swap modes during a
read, we attempt to read with the new mode to see if there is data that needs
to be handled.
---
 .../xpack/security/transport/nio/SSLDriver.java | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
index a44d39a0d7a56..c143978468dfd 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
@@ -113,7 +113,13 @@ public ByteBuffer getNetworkReadBuffer() {
     }

     public void read(InboundChannelBuffer buffer) throws SSLException {
-        currentMode.read(buffer);
+        Mode modePriorToRead;
+        do {
+            modePriorToRead = currentMode;
+            currentMode.read(buffer);
+            // If we switched modes we want to read again as there might be unhandled bytes that need to be
+            // handled by the new mode.
+        } while (modePriorToRead != currentMode);
     }

     public boolean readyForApplicationWrites() {
@@ -365,8 +371,9 @@ public void read(InboundChannelBuffer buffer) throws SSLException {
             try {
                 SSLEngineResult result = unwrap(buffer);
                 handshakeStatus = result.getHandshakeStatus();
-                continueUnwrap = result.bytesConsumed() > 0;
                 handshake();
+                // If we are done handshaking we should exit the handshake read
+                continueUnwrap = result.bytesConsumed() > 0 && currentMode.isHandshake();
             } catch (SSLException e) {
                 closingInternal();
                 throw e;

From 0f85c6429cff3f369383d256bc63c05df07bd88c Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Mon, 14 May 2018 21:57:08 -0400
Subject: [PATCH 17/74] Remove the changelog (#30593)

We are starting over on the changelog with a different approach. This commit
removes the existing incarnation of the changelog to remove confusion that we
need to continue adding entries to it.
---
 docs/CHANGELOG.asciidoc | 257 ----------------------------------------
 1 file changed, 257 deletions(-)
 delete mode 100644 docs/CHANGELOG.asciidoc

diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc
deleted file mode 100644
index 6eb26fde8f9f8..0000000000000
--- a/docs/CHANGELOG.asciidoc
+++ /dev/null
@@ -1,257 +0,0 @@
-[[es-release-notes]]
-= {es} Release Notes
-
-[partintro]
---
-// To add a release, copy and paste the template text
-// and add a link to the new section. Note that release subheads must
-// be floated and sections cannot be empty.
-
-// Use these for links to issue and pulls. Note issues and pulls redirect one to
-// each other on Github, so don't worry too much on using the right prefix.
-:issue: https://github.com/elastic/elasticsearch/issues/
-:pull: https://github.com/elastic/elasticsearch/pull/
-
-This section summarizes the changes in each release.
- -* <> -* <> -* <> - --- - -//// -// To add a release, copy and paste the following text, uncomment the relevant -// sections, and add a link to the new section in the list of releases at the -// top of the page. Note that release subheads must be floated and sections -// cannot be empty. -// TEMPLATE: - -// [[release-notes-n.n.n]] -// == {es} n.n.n - -//[float] -[[breaking-n.n.n]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -//[float] -//=== Deprecations - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -//[float] -//=== Bug Fixes - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -//// - -[[release-notes-7.0.0]] -== {es} 7.0.0 - -coming[7.0.0] - -[float] -[[breaking-7.0.0]] -=== Breaking Changes - -<> ({pull}29609[#29609]) - -<> ({pull}29004[#29004]) -<> ({pull}29635[#29635]) - -<> ({pull}30185[#30185]) - -Machine Learning:: -* The `max_running_jobs` node property is removed in this release. Use the -`xpack.ml.max_open_jobs` setting instead. For more information, see <>. - -* <> ({pull}29601[#29601]) - -//[float] -//=== Breaking Java Changes - -[float] -=== Deprecations -Monitoring:: -* The `xpack.monitoring.collection.interval` setting can no longer be set to `-1` -to disable monitoring data collection. Use `xpack.monitoring.collection.enabled` -and set it to `false` (its default), which was added in 6.3.0. - -Security:: -* The fields returned as part of the mappings section by get index, get -mappings, get field mappings, and field capabilities API are now only the -ones that the user is authorized to access in case field level security is enabled. - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -[float] -=== Bug Fixes - -Use date format in `date_range` mapping before fallback to default ({pull}29310[#29310]) - -Fix NPE in 'more_like_this' when field has zero tokens ({pull}30365[#30365]) - -Fixed prerelease version of elasticsearch in the `deb` package to sort before GA versions -({pull}29000[#29000]) - -Rollup:: -* Validate timezone in range queries to ensure they match the selected job when -searching ({pull}30338[#30338]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -[float] -=== Regressions -Fail snapshot operations early when creating or deleting a snapshot on a repository that has been -written to by an older Elasticsearch after writing to it with a newer Elasticsearch version. ({pull}30140[#30140]) - -Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641]) -Do not fail snapshot when deleting a missing snapshotted file ({pull}30332[#30332]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -[[release-notes-6.4.0]] -== {es} 6.4.0 - -coming[6.4.0] - -//[float] -[[breaking-6.4.0]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -[float] -=== Deprecations - -Deprecated multi-argument versions of the request methods in the RestClient. -Prefer the "Request" object flavored methods. ({pull}30315[#30315]) - -[float] -=== New Features - -The new <> field allows to know which fields -got ignored at index time because of the <> -option. ({pull}30140[#29658]) - -A new analysis plugin called `analysis_nori` that exposes the Lucene Korean -analysis module. 
({pull}30397[#30397]) - -[float] -=== Enhancements - -{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow -copying source settings on index resize operations] ({pull}30255[#30255], {pull}30404[#30404]) - -Added new "Request" object flavored request methods in the RestClient. Prefer -these instead of the multi-argument versions. ({pull}29623[#29623]) - -Added `setJsonEntity` to `Request` object so it is marginally easier to send JSON. ({pull}30447[#30447]) -Watcher HTTP client used in watches now allows more parallel connections to the -same endpoint and evicts long running connections. ({pull}30130[#30130]) - -The cluster state listener to decide if watcher should be -stopped/started/paused now runs far less code in an executor but is more -synchronous and predictable. Also the trigger engine thread is only started on -data nodes. And the Execute Watch API can be triggered regardless is watcher is -started or stopped. ({pull}30118[#30118]) - -Added put index template API to the high level rest client ({pull}30400[#30400]) - -Add ability to filter coordinating-only nodes when interacting with cluster -APIs. ({pull}30313[#30313]) - -[float] -=== Bug Fixes - -Use date format in `date_range` mapping before fallback to default ({pull}29310[#29310]) - -Fix NPE in 'more_like_this' when field has zero tokens ({pull}30365[#30365]) - -Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216]) - -Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641]) - -Machine Learning:: - -* Account for gaps in data counts after job is reopened ({pull}30294[#30294]) - -Add validation that geohashes are not empty and don't contain unsupported characters ({pull}30376[#30376]) - -Rollup:: -* Validate timezone in range queries to ensure they match the selected job when -searching ({pull}30338[#30338]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -Allocation:: - -Auto-expand replicas when adding or removing nodes to prevent shard copies from -being dropped and resynced when a data node rejoins the cluster ({pull}30423[#30423]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -[[release-notes-6.3.1]] -== Elasticsearch version 6.3.1 - -coming[6.3.1] - -//[float] -[[breaking-6.3.1]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -//[float] -//=== Deprecations - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -[float] -=== Bug Fixes - -Reduce the number of object allocations made by {security} when resolving the indices and aliases for a request ({pull}30180[#30180]) - -Respect accept header on requests with no handler ({pull}30383[#30383]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues From 7dd816e77c761afacb0de662241a410e8f7618db Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 15 May 2018 14:28:29 +1000 Subject: [PATCH 18/74] Update build file due to doc file rename A file with uncoverted snippets was change as part of 7f47ff9, but build.gradle was not updated to reflect the rename. 
--- x-pack/docs/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index ab9bc99459968..ede446d6074f8 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -81,7 +81,7 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/validate-job.asciidoc', 'en/rest-api/security/authenticate.asciidoc', 'en/rest-api/watcher/stats.asciidoc', - 'en/security/authorization.asciidoc', + 'en/security/authorization/overview.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] From 50c34b2a9bbdca25d04b624cccd320a8428d2309 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 15 May 2018 09:02:38 +0100 Subject: [PATCH 19/74] [ML] Reverse engineer Grok patterns from categorization results (#30125) This change adds a grok_pattern field to the GET categories API output in ML. It's calculated using the regex and examples in the categorization result, and applying a list of candidate Grok patterns to the bits in between the tokens that are considered to define the category. This can currently be considered a prototype, as the Grok patterns it produces are not optimal. However, enough people have said it would be useful for it to be worthwhile exposing it as experimental functionality for interested parties to try out. --- .../docs/en/rest-api/ml/get-category.asciidoc | 24 +- .../en/rest-api/ml/resultsresource.asciidoc | 7 + .../ml/job/results/CategoryDefinition.java | 26 +- x-pack/plugin/ml/build.gradle | 1 + .../action/TransportGetCategoriesAction.java | 2 +- .../categorization/GrokPatternCreator.java | 243 ++++++++++++++++++ .../xpack/ml/job/persistence/JobProvider.java | 20 +- .../AutodetectResultProcessorIT.java | 2 +- .../GrokPatternCreatorTests.java | 232 +++++++++++++++++ .../ml/job/persistence/JobProviderTests.java | 44 ++-- .../job/results/CategoryDefinitionTests.java | 3 + 11 files changed, 561 insertions(+), 43 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java diff --git a/x-pack/docs/en/rest-api/ml/get-category.asciidoc b/x-pack/docs/en/rest-api/ml/get-category.asciidoc index 37d0a95c14c71..9e69083355bbb 100644 --- a/x-pack/docs/en/rest-api/ml/get-category.asciidoc +++ b/x-pack/docs/en/rest-api/ml/get-category.asciidoc @@ -62,11 +62,11 @@ roles provide these privileges. 
For more information, see ==== Examples The following example gets information about one category for the -`it_ops_new_logs` job: +`esxi_log` job: [source,js] -------------------------------------------------- -GET _xpack/ml/anomaly_detectors/it_ops_new_logs/results/categories +GET _xpack/ml/anomaly_detectors/esxi_log/results/categories { "page":{ "size": 1 @@ -83,14 +83,18 @@ In this example, the API returns the following information: "count": 11, "categories": [ { - "job_id": "it_ops_new_logs", - "category_id": 1, - "terms": "Actual Transaction Already Voided Reversed hostname dbserver.acme.com physicalhost esxserver1.acme.com vmhost app1.acme.com", - "regex": ".*?Actual.+?Transaction.+?Already.+?Voided.+?Reversed.+?hostname.+?dbserver.acme.com.+?physicalhost.+?esxserver1.acme.com.+?vmhost.+?app1.acme.com.*", - "max_matching_length": 137, - "examples": [ - "Actual Transaction Already Voided / Reversed;hostname=dbserver.acme.com;physicalhost=esxserver1.acme.com;vmhost=app1.acme.com" - ] + "job_id" : "esxi_log", + "category_id" : 1, + "terms" : "Vpxa verbose vpxavpxaInvtVm opID VpxaInvtVmChangeListener Guest DiskInfo Changed", + "regex" : ".*?Vpxa.+?verbose.+?vpxavpxaInvtVm.+?opID.+?VpxaInvtVmChangeListener.+?Guest.+?DiskInfo.+?Changed.*", + "max_matching_length": 154, + "examples" : [ + "Oct 19 17:04:44 esxi1.acme.com Vpxa: [3CB3FB90 verbose 'vpxavpxaInvtVm' opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed", + "Oct 19 17:04:45 esxi2.acme.com Vpxa: [3CA66B90 verbose 'vpxavpxaInvtVm' opID=WFU-33927856] [VpxaInvtVmChangeListener] Guest DiskInfo Changed", + "Oct 19 17:04:51 esxi1.acme.com Vpxa: [FFDBAB90 verbose 'vpxavpxaInvtVm' opID=WFU-25e0d447] [VpxaInvtVmChangeListener] Guest DiskInfo Changed", + "Oct 19 17:04:58 esxi2.acme.com Vpxa: [FFDDBB90 verbose 'vpxavpxaInvtVm' opID=WFU-bbff0134] [VpxaInvtVmChangeListener] Guest DiskInfo Changed" + ], + "grok_pattern" : ".*?%{SYSLOGTIMESTAMP:timestamp}.+?Vpxa.+?%{BASE16NUM:field}.+?verbose.+?vpxavpxaInvtVm.+?opID.+?VpxaInvtVmChangeListener.+?Guest.+?DiskInfo.+?Changed.*" } ] } diff --git a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc b/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc index fba6522141bf7..c28ed72aedb36 100644 --- a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc +++ b/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc @@ -405,6 +405,13 @@ A category resource has the following properties: `examples`:: (array) A list of examples of actual values that matched the category. +`grok_pattern`:: + experimental[] (string) A Grok pattern that could be used in Logstash or an + Ingest Pipeline to extract fields from messages that match the category. This + field is experimental and may be changed or removed in a future release. The + Grok patterns that are found are not optimal, but are often a good starting + point for manual tweaking. + `job_id`:: (string) The unique identifier for the job that these results belong to. 
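To show what the new `grok_pattern` field is for, the sketch below applies a returned pattern with the same `org.elasticsearch.grok.Grok` helper that this change builds on. It is illustrative only and not part of the patch: `grokPattern` and `message` are placeholder parameters standing in for the API's `grok_pattern` value and a raw log line such as one of the category examples.

[source,java]
----------------------------------------------------
import java.util.Map;

import org.elasticsearch.grok.Grok;

public class GrokPatternClientSketch {
    // Returns the value captured for the "timestamp" field, or null if the
    // pattern does not match the message at all.
    static Object extractTimestamp(String grokPattern, String message) {
        Grok grok = new Grok(Grok.getBuiltinPatterns(), grokPattern);
        Map<String, Object> fields = grok.captures(message); // null when no match
        return fields == null ? null : fields.get("timestamp");
    }
}
----------------------------------------------------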
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java index 98c38241856b6..90d01f66f632b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.job.results; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,6 +35,7 @@ public class CategoryDefinition implements ToXContentObject, Writeable { public static final ParseField REGEX = new ParseField("regex"); public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length"); public static final ParseField EXAMPLES = new ParseField("examples"); + public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); // Used for QueryPage public static final ParseField RESULTS_FIELD = new ParseField("categories"); @@ -51,6 +53,7 @@ private static ConstructingObjectParser createParser(b parser.declareString(CategoryDefinition::setRegex, REGEX); parser.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH); parser.declareStringArray(CategoryDefinition::setExamples, EXAMPLES); + parser.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN); return parser; } @@ -61,6 +64,7 @@ private static ConstructingObjectParser createParser(b private String regex = ""; private long maxMatchingLength = 0L; private final Set examples; + private String grokPattern; public CategoryDefinition(String jobId) { this.jobId = jobId; @@ -74,6 +78,9 @@ public CategoryDefinition(StreamInput in) throws IOException { regex = in.readString(); maxMatchingLength = in.readLong(); examples = new TreeSet<>(in.readList(StreamInput::readString)); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + grokPattern = in.readOptionalString(); + } } @Override @@ -84,6 +91,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(regex); out.writeLong(maxMatchingLength); out.writeStringList(new ArrayList<>(examples)); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalString(grokPattern); + } } public String getJobId() { @@ -139,6 +149,14 @@ public void addExample(String example) { examples.add(example); } + public String getGrokPattern() { + return grokPattern; + } + + public void setGrokPattern(String grokPattern) { + this.grokPattern = grokPattern; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -148,6 +166,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REGEX.getPreferredName(), regex); builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); builder.field(EXAMPLES.getPreferredName(), examples); + if (grokPattern != null) { + builder.field(GROK_PATTERN.getPreferredName(), grokPattern); + } builder.endObject(); return builder; } @@ -166,11 +187,12 @@ public boolean equals(Object other) { && Objects.equals(this.terms, that.terms) && Objects.equals(this.regex, that.regex) && Objects.equals(this.maxMatchingLength, that.maxMatchingLength) - && Objects.equals(this.examples, that.examples); + && 
Objects.equals(this.examples, that.examples) + && Objects.equals(this.grokPattern, that.grokPattern); } @Override public int hashCode() { - return Objects.hash(jobId, categoryId, terms, regex, maxMatchingLength, examples); + return Objects.hash(jobId, categoryId, terms, regex, maxMatchingLength, examples, grokPattern); } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index d9d4882b00e1c..8b991555c0670 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -46,6 +46,7 @@ dependencies { testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') // ml deps + compile project(':libs:grok') compile 'net.sf.supercsv:super-csv:2.4.0' nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" testCompile 'org.ini4j:ini4j:0.5.2' diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java index 25d0cc0cdf821..abf3a33052995 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -41,7 +41,7 @@ protected void doExecute(GetCategoriesAction.Request request, ActionListener listener.onResponse(new GetCategoriesAction.Response(r)), listener::onFailure, client); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java new file mode 100644 index 0000000000000..04280261b2634 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java @@ -0,0 +1,243 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.categorization; + +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.grok.Grok; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +/** + * Creates Grok patterns that will match all the examples in a given category_definition. + * + * The choice of field names is quite primitive. The intention is that a human will edit these. + */ +public final class GrokPatternCreator { + + private static String PREFACE = "preface"; + private static String EPILOGUE = "epilogue"; + + /** + * The first match in this list will be chosen, so it needs to be ordered + * such that more generic patterns come after more specific patterns. 
+ */ + private static final List ORDERED_CANDIDATE_GROK_PATTERNS = Arrays.asList( + new GrokPatternCandidate("TIMESTAMP_ISO8601", "timestamp"), + new GrokPatternCandidate("DATESTAMP_RFC822", "timestamp"), + new GrokPatternCandidate("DATESTAMP_RFC2822", "timestamp"), + new GrokPatternCandidate("DATESTAMP_OTHER", "timestamp"), + new GrokPatternCandidate("DATESTAMP_EVENTLOG", "timestamp"), + new GrokPatternCandidate("SYSLOGTIMESTAMP", "timestamp"), + new GrokPatternCandidate("HTTPDATE", "timestamp"), + new GrokPatternCandidate("CATALINA_DATESTAMP", "timestamp"), + new GrokPatternCandidate("TOMCAT_DATESTAMP", "timestamp"), + new GrokPatternCandidate("CISCOTIMESTAMP", "timestamp"), + new GrokPatternCandidate("DATE", "date"), + new GrokPatternCandidate("TIME", "time"), + new GrokPatternCandidate("LOGLEVEL", "loglevel"), + new GrokPatternCandidate("URI", "uri"), + new GrokPatternCandidate("UUID", "uuid"), + new GrokPatternCandidate("MAC", "macaddress"), + // Can't use \b as the breaks, because slashes are not "word" characters + new GrokPatternCandidate("PATH", "path", "(? examples) { + + // The first string in this array will end up being the empty string, and it doesn't correspond + // to an "in between" bit. Although it could be removed for "neatness", it actually makes the + // loops below slightly neater if it's left in. + // + // E.g., ".*?cat.+?sat.+?mat.*" -> [ "", "cat", "sat", "mat" ] + String[] fixedRegexBits = regex.split("\\.[*+]\\??"); + + // Create a pattern that will capture the bits in between the fixed parts of the regex + // + // E.g., ".*?cat.+?sat.+?mat.*" -> Pattern (.*?)cat(.+?)sat(.+?)mat(.*) + Pattern exampleProcessor = Pattern.compile(regex.replaceAll("(\\.[*+]\\??)", "($1)"), Pattern.DOTALL); + + List> groupsMatchesFromExamples = new ArrayList<>(fixedRegexBits.length); + for (int i = 0; i < fixedRegexBits.length; ++i) { + groupsMatchesFromExamples.add(new ArrayList<>(examples.size())); + } + for (String example : examples) { + Matcher matcher = exampleProcessor.matcher(example); + if (matcher.matches()) { + assert matcher.groupCount() == fixedRegexBits.length; + // E.g., if the input regex was ".*?cat.+?sat.+?mat.*" then the example + // "the cat sat on the mat" will result in "the ", " ", " on the ", and "" + // being added to the 4 "in between" collections in that order + for (int groupNum = 1; groupNum <= matcher.groupCount(); ++groupNum) { + groupsMatchesFromExamples.get(groupNum - 1).add(matcher.group(groupNum)); + } + } else { + // We should never get here. If we do it implies a bug in the original categorization, + // as it's produced a regex that doesn't match the examples. 
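+ // With assertions enabled (as in tests) the assert below trips immediately; in production we fall through and just log the mismatch.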
+ assert matcher.matches() : exampleProcessor.pattern() + " did not match " + example; + Loggers.getLogger(GrokPatternCreator.class).error("[{}] Pattern [{}] did not match example [{}]", jobId, + exampleProcessor.pattern(), example); + } + } + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + // Finally, for each collection of "in between" bits we look for the best Grok pattern and incorporate + // it into the overall Grok pattern that will match the each example in its entirety + for (int inBetweenBitNum = 0; inBetweenBitNum < groupsMatchesFromExamples.size(); ++inBetweenBitNum) { + // Remember (from the first comment in this method) that the first element in this array is + // always the empty string + overallGrokPatternBuilder.append(fixedRegexBits[inBetweenBitNum]); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, inBetweenBitNum == 0, + inBetweenBitNum == fixedRegexBits.length - 1, groupsMatchesFromExamples.get(inBetweenBitNum)); + } + return overallGrokPatternBuilder.toString(); + } + + /** + * Given a collection of strings, work out which (if any) of the grok patterns we're allowed + * to use matches it best. Then append the appropriate grok language to represent that finding + * onto the supplied string builder. + */ + static void appendBestGrokMatchForStrings(Map fieldNameCountStore, StringBuilder overallGrokPatternBuilder, + boolean isFirst, boolean isLast, Collection mustMatchStrings) { + + GrokPatternCandidate bestCandidate = null; + if (mustMatchStrings.isEmpty() == false) { + for (GrokPatternCandidate candidate : ORDERED_CANDIDATE_GROK_PATTERNS) { + if (mustMatchStrings.stream().allMatch(candidate.grok::match)) { + bestCandidate = candidate; + break; + } + } + } + + if (bestCandidate == null) { + if (isLast) { + overallGrokPatternBuilder.append(".*"); + } else if (isFirst || mustMatchStrings.stream().anyMatch(String::isEmpty)) { + overallGrokPatternBuilder.append(".*?"); + } else { + overallGrokPatternBuilder.append(".+?"); + } + } else { + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + populatePrefacesAndEpilogues(mustMatchStrings, bestCandidate.grok, prefaces, epilogues); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, isFirst, false, prefaces); + overallGrokPatternBuilder.append("%{").append(bestCandidate.grokPatternName).append(':') + .append(buildFieldName(fieldNameCountStore, bestCandidate.fieldName)).append('}'); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, isLast, epilogues); + } + } + + /** + * Given a collection of strings, and a grok pattern that matches some part of them all, + * return collections of the bits that come before (prefaces) and after (epilogues) the + * bit that matches. + */ + static void populatePrefacesAndEpilogues(Collection matchingStrings, Grok grok, Collection prefaces, + Collection epilogues) { + for (String s : matchingStrings) { + Map captures = grok.captures(s); + // If the pattern doesn't match then captures will be null. But we expect this + // method to only be called after validating that the pattern does match. + assert captures != null; + prefaces.add(captures.getOrDefault(PREFACE, "").toString()); + epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); + } + } + + /** + * The first time a particular field name is passed, simply return it. + * The second time return it with "2" appended. 
+ * The third time return it with "3" appended. + * Etc. + */ + static String buildFieldName(Map fieldNameCountStore, String fieldName) { + Integer numberSeen = fieldNameCountStore.compute(fieldName, (k, v) -> 1 + ((v == null) ? 0 : v)); + if (numberSeen > 1) { + return fieldName + numberSeen; + } else { + return fieldName; + } + } + + static class GrokPatternCandidate { + + final String grokPatternName; + final String fieldName; + final Grok grok; + + /** + * Pre/post breaks default to \b, but this may not be appropriate for Grok patterns that start or + * end with a non "word" character (i.e. letter, number or underscore). For such patterns use one + * of the other constructors. + * + * In cases where the Grok pattern defined by Logstash already includes conditions on what must + * come before and after the match, use one of the other constructors and specify an empty string + * for the pre and/or post breaks. + * @param grokPatternName Name of the Grok pattern to try to match - must match one defined in Logstash. + * @param fieldName Name of the field to extract from the match. + */ + GrokPatternCandidate(String grokPatternName, String fieldName) { + this(grokPatternName, fieldName, "\\b", "\\b"); + } + + GrokPatternCandidate(String grokPatternName, String fieldName, String preBreak) { + this(grokPatternName, fieldName, preBreak, "\\b"); + } + + /** + * @param grokPatternName Name of the Grok pattern to try to match - must match one defined in Logstash. + * @param fieldName Name of the field to extract from the match. + * @param preBreak Only consider the match if it's broken from the previous text by this. + * @param postBreak Only consider the match if it's broken from the following text by this. + */ + GrokPatternCandidate(String grokPatternName, String fieldName, String preBreak, String postBreak) { + this.grokPatternName = grokPatternName; + this.fieldName = fieldName; + this.grok = new Grok(Grok.getBuiltinPatterns(), "%{DATA:" + PREFACE + "}" + preBreak + "%{" + grokPatternName + ":this}" + + postBreak + "%{GREEDYDATA:" + EPILOGUE + "}"); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 4b15ef36e6ac7..d7b10fb622bdf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -98,6 +98,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; @@ -486,7 +487,7 @@ private T parseSearchHit(SearchHit hit, BiFunction } } - private T parseGetHit(GetResponse getResponse, BiFunction objectParser, + private T parseGetHit(GetResponse getResponse, BiFunction objectParser, Consumer errorHandler) { BytesReference source = getResponse.getSourceAsBytesRef(); @@ -626,10 +627,11 @@ public void bucketRecords(String jobId, Bucket bucket, int from, int size, boole * Get a page of {@linkplain CategoryDefinition}s for the given jobId. 
* Uses a supplied client, so may run as the currently authenticated user * @param jobId the job id + * @param augment Should the category definition be augmented with a Grok pattern? * @param from Skip the first N categories. This parameter is for paging * @param size Take only this number of categories */ - public void categoryDefinitions(String jobId, Long categoryId, Integer from, Integer size, + public void categoryDefinitions(String jobId, Long categoryId, boolean augment, Integer from, Integer size, Consumer> handler, Consumer errorHandler, Client client) { if (categoryId != null && (from != null || size != null)) { @@ -663,6 +665,9 @@ public void categoryDefinitions(String jobId, Long categoryId, Integer from, Int XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { CategoryDefinition categoryDefinition = CategoryDefinition.LENIENT_PARSER.apply(parser, null); + if (augment) { + augmentWithGrokPattern(categoryDefinition); + } results.add(categoryDefinition); } catch (IOException e) { throw new ElasticsearchParseException("failed to parse category definition", e); @@ -674,6 +679,17 @@ public void categoryDefinitions(String jobId, Long categoryId, Integer from, Int }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetCategoriesAction.NAME))), client::search); } + void augmentWithGrokPattern(CategoryDefinition categoryDefinition) { + List examples = categoryDefinition.getExamples(); + String regex = categoryDefinition.getRegex(); + if (examples.isEmpty() || regex.isEmpty()) { + categoryDefinition.setGrokPattern(""); + } else { + categoryDefinition.setGrokPattern(GrokPatternCreator.findBestGrokMatchFromExamples(categoryDefinition.getJobId(), + regex, examples)); + } + } + /** * Search for anomaly records with the parameters in the * {@link RecordsQueryBuilder} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 484d1648fbbb2..09bb3f7591677 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -461,7 +461,7 @@ private QueryPage getCategoryDefinition(long categoryId) thr AtomicReference errorHolder = new AtomicReference<>(); AtomicReference> resultHolder = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - jobProvider.categoryDefinitions(JOB_ID, categoryId, null, null, r -> { + jobProvider.categoryDefinitions(JOB_ID, categoryId, false, null, null, r -> { resultHolder.set(r); latch.countDown(); }, e -> { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java new file mode 100644 index 0000000000000..4189dc35f0caa --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java @@ -0,0 +1,232 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.categorization; + +import org.elasticsearch.grok.Grok; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class GrokPatternCreatorTests extends ESTestCase { + + public void testBuildFieldName() { + Map fieldNameCountStore = new HashMap<>(); + assertEquals("field", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("field2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("field3", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("timestamp", GrokPatternCreator.buildFieldName(fieldNameCountStore, "timestamp")); + assertEquals("field4", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("uri", GrokPatternCreator.buildFieldName(fieldNameCountStore, "uri")); + assertEquals("timestamp2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "timestamp")); + assertEquals("field5", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + } + + public void testPopulatePrefacesAndEpiloguesGivenTimestamp() { + + Collection matchingStrings = Arrays.asList("[2018-01-25T15:33:23] DEBUG ", + "[2018-01-24T12:33:23] ERROR ", + "junk [2018-01-22T07:33:23] INFO ", + "[2018-01-21T03:33:23] DEBUG "); + Grok grok = new GrokPatternCreator.GrokPatternCandidate("TIMESTAMP_ISO8601", "timestamp").grok; + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + + GrokPatternCreator.populatePrefacesAndEpilogues(matchingStrings, grok, prefaces, epilogues); + + assertThat(prefaces, containsInAnyOrder("[", "[", "junk [", "[")); + assertThat(epilogues, containsInAnyOrder("] DEBUG ", "] ERROR ", "] INFO ", "] DEBUG ")); + } + + public void testPopulatePrefacesAndEpiloguesGivenEmailAddress() { + + Collection matchingStrings = Arrays.asList("before alice@acme.com after", + "abc bob@acme.com xyz", + "carol@acme.com"); + Grok grok = new GrokPatternCreator.GrokPatternCandidate("EMAILADDRESS", "email").grok; + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + + GrokPatternCreator.populatePrefacesAndEpilogues(matchingStrings, grok, prefaces, epilogues); + + assertThat(prefaces, containsInAnyOrder("before ", "abc ", "")); + assertThat(epilogues, containsInAnyOrder(" after", " xyz", "")); + } + + public void testAppendBestGrokMatchForStringsGivenTimestampsAndLogLevels() { + + Collection mustMatchStrings = Arrays.asList("[2018-01-25T15:33:23] DEBUG ", + "[2018-01-24T12:33:23] ERROR ", + "junk [2018-01-22T07:33:23] INFO ", + "[2018-01-21T03:33:23] DEBUG "); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".+?%{TIMESTAMP_ISO8601:timestamp}.+?%{LOGLEVEL:loglevel}.+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenNumbersInBrackets() { + + Collection mustMatchStrings = Arrays.asList("(-2)", + " (-3)", + " (4)", + " (-5) "); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, 
false, false, mustMatchStrings); + + assertEquals(".+?%{NUMBER:field}.+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenNegativeNumbersWithoutBreak() { + + Collection mustMatchStrings = Arrays.asList("before-2 ", + "prior to-3", + "-4"); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + // It seems sensible that we don't detect these suffices as either base 10 or base 16 numbers + assertEquals(".+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenHexNumbers() { + + Collection mustMatchStrings = Arrays.asList(" abc", + " 123", + " -123", + "1f is hex"); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{BASE16NUM:field}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenHostnamesWithNumbers() { + + Collection mustMatchStrings = Arrays.asList(" fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + // We don't want the .1. in the middle to get detected as a hex number + assertEquals(".+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenEmailAddresses() { + + Collection mustMatchStrings = Arrays.asList("before alice@acme.com after", + "abc bob@acme.com xyz", + "carol@acme.com"); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{EMAILADDRESS:email}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenUris() { + + Collection mustMatchStrings = Arrays.asList("main site https://www.elastic.co/ with trailing slash", + "https://www.elastic.co/guide/en/x-pack/current/ml-configuring-categories.html#ml-configuring-categories is a section", + "download today from https://www.elastic.co/downloads"); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{URI:uri}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenPaths() { + + Collection mustMatchStrings = Arrays.asList("on Mac /Users/dave", + "on Windows C:\\Users\\dave", + "on Linux /home/dave"); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".+?%{PATH:path}.*?", overallGrokPatternBuilder.toString()); + } + + public void testFindBestGrokMatchFromExamplesGivenNamedLogs() { + + String regex = 
".*?linux.+?named.+?error.+?unexpected.+?RCODE.+?REFUSED.+?resolving.*"; + Collection examples = Arrays.asList( + "Sep 8 11:55:06 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'elastic.slack.com/A/IN': 95.110.64.205#53", + "Sep 8 11:55:08 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'slack-imgs.com/A/IN': 95.110.64.205#53", + "Sep 8 11:55:35 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53", + "Sep 8 11:55:42 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'b.akamaiedge.net/A/IN': 95.110.64.205#53"); + + assertEquals(".*?%{SYSLOGTIMESTAMP:timestamp}.+?linux.+?named.+?%{NUMBER:field}.+?error.+?" + + "unexpected.+?RCODE.+?REFUSED.+?resolving.+?%{QUOTEDSTRING:field2}.+?%{IP:ipaddress}.+?%{NUMBER:field3}.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } + + public void testFindBestGrokMatchFromExamplesGivenCatalinaLogs() { + + String regex = ".*?org\\.apache\\.tomcat\\.util\\.http\\.Parameters.+?processParameters.+?WARNING.+?Parameters.+?" + + "Invalid.+?chunk.+?ignored.*"; + // The embedded newline ensures the regular expressions we're using are compiled with Pattern.DOTALL + Collection examples = Arrays.asList( + "Aug 29, 2009 12:03:33 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:40 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:45 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored."); + + assertEquals(".*?%{CATALINA_DATESTAMP:timestamp}.+?org\\.apache\\.tomcat\\.util\\.http\\.Parameters.+?processParameters.+?" + + "WARNING.+?Parameters.+?Invalid.+?chunk.+?ignored.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } + + public void testFindBestGrokMatchFromExamplesGivenMultiTimestampLogs() { + + String regex = ".*?Authpriv.+?Info.+?sshd.+?subsystem.+?request.+?for.+?sftp.*"; + // Two timestamps: one local, one UTC + Collection examples = Arrays.asList( + "559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912548986880\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912548986887\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912603512850\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp"); + + assertEquals(".*?%{NUMBER:field}.+?%{TIMESTAMP_ISO8601:timestamp}.+?%{TIMESTAMP_ISO8601:timestamp2}.+?%{NUMBER:field2}.+?" 
+ + "%{IP:ipaddress}.+?Authpriv.+?Info.+?sshd.+?subsystem.+?request.+?for.+?sftp.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index 485fe44a95fa9..9fea904a99fa1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -61,7 +61,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -235,8 +234,7 @@ public void onFailure(Exception e) { }); } - public void testBuckets_OneBucketNoInterim() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_OneBucketNoInterim() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -268,8 +266,7 @@ public void testBuckets_OneBucketNoInterim() ".*")); } - public void testBuckets_OneBucketInterim() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_OneBucketInterim() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -302,8 +299,7 @@ public void testBuckets_OneBucketInterim() assertFalse(queryString.matches("(?s).*is_interim.*")); } - public void testBuckets_UsingBuilder() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_UsingBuilder() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -339,8 +335,7 @@ public void testBuckets_UsingBuilder() assertFalse(queryString.matches("(?s).*is_interim.*")); } - public void testBucket_NoBucketNoExpand() - throws InterruptedException, ExecutionException, IOException { + public void testBucket_NoBucketNoExpand() throws IOException { String jobId = "TestJobIdentification"; Long timestamp = 98765432123456789L; List> source = new ArrayList<>(); @@ -357,8 +352,7 @@ public void testBucket_NoBucketNoExpand() assertEquals(ResourceNotFoundException.class, holder[0].getClass()); } - public void testBucket_OneBucketNoExpand() - throws InterruptedException, ExecutionException, IOException { + public void testBucket_OneBucketNoExpand() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -384,7 +378,7 @@ public void testBucket_OneBucketNoExpand() assertEquals(now, b.getTimestamp()); } - public void testRecords() throws InterruptedException, ExecutionException, IOException { + public void testRecords() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -431,8 +425,7 @@ public void testRecords() throws InterruptedException, ExecutionException, IOExc assertEquals("irrascible", records.get(1).getFunction()); } - public void testRecords_UsingBuilder() - throws InterruptedException, ExecutionException, IOException { + public void testRecords_UsingBuilder() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -485,7 +478,7 @@ public void testRecords_UsingBuilder() 
assertEquals("irrascible", records.get(1).getFunction()); } - public void testBucketRecords() throws InterruptedException, ExecutionException, IOException { + public void testBucketRecords() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); Bucket bucket = mock(Bucket.class); @@ -532,7 +525,7 @@ public void testBucketRecords() throws InterruptedException, ExecutionException, assertEquals("irrascible", records.get(1).getFunction()); } - public void testexpandBucket() throws InterruptedException, ExecutionException, IOException { + public void testexpandBucket() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); Bucket bucket = new Bucket("foo", now, 22); @@ -559,8 +552,7 @@ public void testexpandBucket() throws InterruptedException, ExecutionException, assertEquals(400L, records); } - public void testCategoryDefinitions() - throws InterruptedException, ExecutionException, IOException { + public void testCategoryDefinitions() throws IOException { String jobId = "TestJobIdentification"; String terms = "the terms and conditions are not valid here"; List> source = new ArrayList<>(); @@ -580,15 +572,14 @@ public void testCategoryDefinitions() JobProvider provider = createProvider(client); @SuppressWarnings({"unchecked", "rawtypes"}) QueryPage[] holder = new QueryPage[1]; - provider.categoryDefinitions(jobId, null, from, size, r -> holder[0] = r, + provider.categoryDefinitions(jobId, null, false, from, size, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage categoryDefinitions = holder[0]; assertEquals(1L, categoryDefinitions.count()); assertEquals(terms, categoryDefinitions.results().get(0).getTerms()); } - public void testCategoryDefinition() - throws InterruptedException, ExecutionException, IOException { + public void testCategoryDefinition() throws IOException { String jobId = "TestJobIdentification"; String terms = "the terms and conditions are not valid here"; @@ -603,14 +594,14 @@ public void testCategoryDefinition() JobProvider provider = createProvider(client); @SuppressWarnings({"unchecked", "rawtypes"}) QueryPage[] holder = new QueryPage[1]; - provider.categoryDefinitions(jobId, categoryId, null, null, + provider.categoryDefinitions(jobId, categoryId, false, null, null, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage categoryDefinitions = holder[0]; assertEquals(1L, categoryDefinitions.count()); assertEquals(terms, categoryDefinitions.results().get(0).getTerms()); } - public void testInfluencers_NoInterim() throws InterruptedException, ExecutionException, IOException { + public void testInfluencers_NoInterim() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -670,7 +661,7 @@ public void testInfluencers_NoInterim() throws InterruptedException, ExecutionEx assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001); } - public void testInfluencers_WithInterim() throws InterruptedException, ExecutionException, IOException { + public void testInfluencers_WithInterim() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -730,7 +721,7 @@ public void testInfluencers_WithInterim() throws InterruptedException, Execution assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001); } - public void testModelSnapshots() throws InterruptedException, ExecutionException, IOException { + 
public void testModelSnapshots() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -851,8 +842,7 @@ private static GetResponse createGetResponse(boolean exists, Map return getResponse; } - private static SearchResponse createSearchResponse(List> source) - throws IOException { + private static SearchResponse createSearchResponse(List> source) throws IOException { SearchResponse response = mock(SearchResponse.class); List list = new ArrayList<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java index fdaa28508235a..ee7d4ad4b7add 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java @@ -25,6 +25,9 @@ public CategoryDefinition createTestInstance(String jobId) { categoryDefinition.setRegex(randomAlphaOfLength(10)); categoryDefinition.setMaxMatchingLength(randomLong()); categoryDefinition.setExamples(Arrays.asList(generateRandomStringArray(10, 10, false))); + if (randomBoolean()) { + categoryDefinition.setGrokPattern(randomAlphaOfLength(50)); + } return categoryDefinition; } From bf2fb210cc6e917df7f7e179948aa285ea14f405 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 15 May 2018 10:35:16 +0200 Subject: [PATCH 20/74] [Tests] Relax allowed delta in extended_stats aggregation (#30569) The order in which double values are added in java can give different results for the sum, so we need to allow a certain delta in the test assertions. The current value was still a bit too low, resulting in rare test failures. This change increases the allowed margin of error by a factor of ten. --- .../search/aggregations/metrics/InternalExtendedStatsTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index 6178a72c83e3e..eb6a2e40a01b9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -84,7 +84,7 @@ protected void assertReduced(InternalExtendedStats reduced, List Date: Tue, 15 May 2018 14:12:30 +0530 Subject: [PATCH 21/74] [Docs] Improve section detailing translog usage (#30573) --- docs/reference/index-modules/translog.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index b1eb36e346d9f..bed19bd5be1df 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -108,8 +108,8 @@ provide a command-line tool for this, `elasticsearch-translog`. [WARNING] The `elasticsearch-translog` tool should *not* be run while Elasticsearch is -running, and you will permanently lose the documents that were contained only in -the translog! +running. If you attempt to run this tool while Elasticsearch is running, you +will permanently lose the documents that were contained only in the translog! 
In order to run the `elasticsearch-translog` tool, specify the `truncate` subcommand as well as the directory for the corrupted translog with the `-d` From 1de5a3180a59e670b97820004d42a027409383fb Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 15 May 2018 09:45:37 +0100 Subject: [PATCH 22/74] [ML] Adjust BWC version following backport of #30125 --- .../xpack/core/ml/job/results/CategoryDefinition.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java index 90d01f66f632b..7d5fb0a1bae0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/CategoryDefinition.java @@ -78,7 +78,7 @@ public CategoryDefinition(StreamInput in) throws IOException { regex = in.readString(); maxMatchingLength = in.readLong(); examples = new TreeSet<>(in.readList(StreamInput::readString)); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { grokPattern = in.readOptionalString(); } } @@ -91,7 +91,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(regex); out.writeLong(maxMatchingLength); out.writeStringList(new ArrayList<>(examples)); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalString(grokPattern); } } From af2b9dd779764f8c096a8af8ed3321dd31fb8a96 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 May 2018 11:20:57 +0200 Subject: [PATCH 23/74] Revert "Mute ML upgrade test (#30458)" This reverts commit 4b36ea74334e2664412af3540a64b82e0845e710. 
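The mute being reverted here is Lucene's standard test-muting annotation. As a minimal sketch of the pattern, mirroring the suite touched in the diff below with the annotation still in place:

@TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs
@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30456")
public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
    // The randomized test runner skips the entire suite while @AwaitsFix is
    // present; un-muting deletes the annotation and its import, which is
    // exactly what the diff below does.
}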
--- .../upgrades/UpgradeClusterClientYamlTestSuiteIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 6040de8f50cda..c9ad4b3053cbe 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -30,7 +29,6 @@ import static org.hamcrest.Matchers.is; @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30456") public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { /** From 098b3b7fb4a9964aec8530948d66424f4612db9e Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 15 May 2018 11:47:28 +0200 Subject: [PATCH 24/74] [DOCS] Remove references to removed changelog --- docs/reference/index-shared4.asciidoc | 2 +- docs/reference/release-notes/highlights-7.0.0.asciidoc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc index 3dfb3b641890f..3dc9e4f5e07cf 100644 --- a/docs/reference/index-shared4.asciidoc +++ b/docs/reference/index-shared4.asciidoc @@ -7,4 +7,4 @@ include::glossary.asciidoc[] include::release-notes/highlights.asciidoc[] -include::{docdir}/../CHANGELOG.asciidoc[] \ No newline at end of file +include::release-notes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/release-notes/highlights-7.0.0.asciidoc b/docs/reference/release-notes/highlights-7.0.0.asciidoc index 1ea3d3fa3291e..d01d543c8257e 100644 --- a/docs/reference/release-notes/highlights-7.0.0.asciidoc +++ b/docs/reference/release-notes/highlights-7.0.0.asciidoc @@ -6,4 +6,4 @@ coming[7.0.0] -See also <> and <>. +See also <> and <>. From b50cf3c6b09a29c218999036f25a9b4e0739a7c9 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 May 2018 11:51:54 +0200 Subject: [PATCH 25/74] Side-step pending deletes check (#30571) When we split/shrink an index we open several IndexWriter instances causing file-deletes to be pending on Windows. This subsequently fails when we open an IW to bootstrap the index history due to pending deletes. This change sidesteps the check since we know our history goes forward in terms of files and segments.
Closes #30416 --- .../rest-api-spec/test/indices.split/10_basic.yml | 7 ++----- .../test/indices.split/20_source_mapping.yml | 7 ++----- .../java/org/elasticsearch/index/store/Store.java | 15 +++++++++++++-- .../admin/indices/create/ShrinkIndexIT.java | 2 -- .../action/admin/indices/create/SplitIndexIT.java | 2 -- 5 files changed, 17 insertions(+), 16 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 4f645d3eb3e0b..8cfe77042dd3f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -107,11 +107,8 @@ setup: --- "Split from 1 to N": - skip: - # when re-enabling uncomment the below skips - version: "all" - reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.3.99" - # reason: expects warnings that pre-6.4.0 will not send + version: " - 6.99.99" + reason: Automatic preparation for splitting was added in 7.0.0 features: "warnings" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 4bac4bf5b0807..88d3f3c610202 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ -1,11 +1,8 @@ --- "Split index ignores target template mapping": - skip: - # when re-enabling uncomment the below skips - version: "all" - reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.3.99" - # reason: expects warnings that pre-6.4.0 will not send + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send features: "warnings" # create index diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index de29386022cc6..0374d74dcf58b 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -731,13 +731,13 @@ static final class StoreDirectory extends FilterDirectory { private final Logger deletesLogger; - StoreDirectory(Directory delegateDirectory, Logger deletesLogger) throws IOException { + StoreDirectory(Directory delegateDirectory, Logger deletesLogger) { super(delegateDirectory); this.deletesLogger = deletesLogger; } @Override - public void close() throws IOException { + public void close() { assert false : "Nobody should close this directory except of the Store itself"; } @@ -759,6 +759,17 @@ private void innerClose() throws IOException { public String toString() { return "store(" + in.toString() + ")"; } + + @Override + public boolean checkPendingDeletions() throws IOException { + if (super.checkPendingDeletions()) { + deletesLogger.warn("directory has still pending deletes"); + } + // we skip this check since our IW usage always goes forward. + // we still might run into situations where we have pending deletes ie. 
in shrink / split case + // and that will cause issues on windows since we open multiple IW instance one after another during the split/shrink recovery + return false; + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 8443ac2bf2e3d..e48f151081f62 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -77,7 +76,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public class ShrinkIndexIT extends ESIntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index a7f7ed6f52546..fe6e980ab4259 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -81,7 +80,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public class SplitIndexIT extends ESIntegTestCase { @Override From e1d675c690b0b4d5d224205968c463d7a0dc7525 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 15 May 2018 12:36:51 +0200 Subject: [PATCH 26/74] [DOCS] Remove references to changelog and to highlights highlights reference the changelog and it currently breaks the docs. This aligns changes in master with the ones made in other branches. --- docs/reference/index-shared4.asciidoc | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc index 3dc9e4f5e07cf..f4e87b4e9e8fc 100644 --- a/docs/reference/index-shared4.asciidoc +++ b/docs/reference/index-shared4.asciidoc @@ -4,7 +4,3 @@ include::how-to.asciidoc[] include::testing.asciidoc[] include::glossary.asciidoc[] - -include::release-notes/highlights.asciidoc[] - -include::release-notes.asciidoc[] \ No newline at end of file From 17d65c1f06cd2d3dabad6ef42a04f3899d3b2de8 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 15 May 2018 13:48:15 +0300 Subject: [PATCH 27/74] Unmute IndexUpgradeIT tests After additional fixes to the Jenkins job, these tests can now be unmuted. 
Closes #30430 --- .../java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 9f1fb95ed4835..ef5c3acc3d238 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.upgrade; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.search.SearchResponse; @@ -31,7 +30,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30430") public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { @Before From d1c28c60fc004234368ce9e55bb3c1023a1e6dc9 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 15 May 2018 08:06:58 -0400 Subject: [PATCH 28/74] HLRestClient: Follow-up for put index template api (#30592) This commit addresses some comments given after the original PR was in. Follow-up #30400 --- .../template/put/PutIndexTemplateRequest.java | 16 +++-- .../put/PutIndexTemplateRequestTests.java | 62 +++++++++++-------- .../put/PutIndexTemplateResponseTests.java | 45 ++++++++++++++ 3 files changed, 92 insertions(+), 31 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index b018e24a565b8..5d4e558dbb25b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; @@ -45,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.io.IOException; @@ -543,9 +545,6 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (customs.isEmpty() == false) { - throw new IllegalArgumentException("Custom data type is no longer supported in index template [" + customs + "]"); - } builder.field("index_patterns", indexPatterns); builder.field("order", order); if (version != null) { @@ -558,8 +557,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("mappings"); for 
(Map.Entry entry : mappings.entrySet()) { - Map mapping = XContentHelper.convertToMap(new BytesArray(entry.getValue()), false).v2(); - builder.field(entry.getKey(), mapping); + builder.field(entry.getKey()); + XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue()); + builder.copyCurrentStructure(parser); } builder.endObject(); @@ -568,6 +569,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws alias.toXContent(builder, params); } builder.endObject(); + + for (Map.Entry entry : customs.entrySet()) { + builder.field(entry.getKey(), entry.getValue(), params); + } + return builder; } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index 294213452596f..577a8b55e61a3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -23,18 +23,18 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.Arrays; import java.util.Base64; import java.util.Collections; @@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; -public class PutIndexTemplateRequestTests extends ESTestCase { +public class PutIndexTemplateRequestTests extends AbstractXContentTestCase { // bwc for #21009 public void testPutIndexTemplateRequest510() throws IOException { @@ -137,13 +137,14 @@ public void testValidateErrorMessage() throws Exception { assertThat(noError, is(nullValue())); } - private PutIndexTemplateRequest randomPutIndexTemplateRequest() throws IOException { + @Override + protected PutIndexTemplateRequest createTestInstance() { PutIndexTemplateRequest request = new PutIndexTemplateRequest(); request.name("test"); - if (randomBoolean()){ + if (randomBoolean()) { request.version(randomInt()); } - if (randomBoolean()){ + if (randomBoolean()) { request.order(randomInt()); } request.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); @@ -159,30 +160,39 @@ private PutIndexTemplateRequest randomPutIndexTemplateRequest() throws IOExcepti request.alias(alias); } if (randomBoolean()) { - request.mapping("doc", XContentFactory.jsonBuilder().startObject() - .startObject("doc").startObject("properties") - .startObject("field-" + randomInt()).field("type", randomFrom("keyword", "text")).endObject() - .endObject().endObject().endObject()); + try { 
+ request.mapping("doc", XContentFactory.jsonBuilder().startObject() + .startObject("doc").startObject("properties") + .startObject("field-" + randomInt()).field("type", randomFrom("keyword", "text")).endObject() + .endObject().endObject().endObject()); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } } - if (randomBoolean()){ + if (randomBoolean()) { request.settings(Settings.builder().put("setting1", randomLong()).put("setting2", randomTimeValue()).build()); } return request; } - public void testFromToXContentPutTemplateRequest() throws Exception { - for (int i = 0; i < 10; i++) { - PutIndexTemplateRequest expected = randomPutIndexTemplateRequest(); - XContentType xContentType = randomFrom(XContentType.values()); - BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); - PutIndexTemplateRequest parsed = new PutIndexTemplateRequest().source(shuffled, xContentType); - assertNotSame(expected, parsed); - assertThat(parsed.version(), equalTo(expected.version())); - assertThat(parsed.order(), equalTo(expected.order())); - assertThat(parsed.patterns(), equalTo(expected.patterns())); - assertThat(parsed.aliases(), equalTo(expected.aliases())); - assertThat(parsed.mappings(), equalTo(expected.mappings())); - assertThat(parsed.settings(), equalTo(expected.settings())); - } + @Override + protected PutIndexTemplateRequest doParseInstance(XContentParser parser) throws IOException { + return new PutIndexTemplateRequest().source(parser.map()); + } + + @Override + protected void assertEqualInstances(PutIndexTemplateRequest expected, PutIndexTemplateRequest actual) { + assertNotSame(expected, actual); + assertThat(actual.version(), equalTo(expected.version())); + assertThat(actual.order(), equalTo(expected.order())); + assertThat(actual.patterns(), equalTo(expected.patterns())); + assertThat(actual.aliases(), equalTo(expected.aliases())); + assertThat(actual.mappings(), equalTo(expected.mappings())); + assertThat(actual.settings(), equalTo(expected.settings())); + } + + @Override + protected boolean supportsUnknownFields() { + return false; } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java new file mode 100644 index 0000000000000..096d62bf2bb5b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.template.put; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +public class PutIndexTemplateResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected PutIndexTemplateResponse doParseInstance(XContentParser parser) { + return PutIndexTemplateResponse.fromXContent(parser); + } + + @Override + protected PutIndexTemplateResponse createTestInstance() { + return new PutIndexTemplateResponse(randomBoolean()); + } + + @Override + protected PutIndexTemplateResponse createBlankInstance() { + return new PutIndexTemplateResponse(); + } + + @Override + protected PutIndexTemplateResponse mutateInstance(PutIndexTemplateResponse response) { + return new PutIndexTemplateResponse(response.isAcknowledged() == false); + } +} From 801973fa9f225ddce9cac707251110659dcdc760 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 15 May 2018 18:22:58 +0300 Subject: [PATCH 29/74] Repository GCS plugin new client library (#30168) This does away with the deprecated `com.google.api-client:google-api-client:1.23` and replaces it with `com.google.cloud:google-cloud-storage:1.28.0`. It also changes security permissions for the repository-gcs plugin. --- docs/plugins/repository-gcs.asciidoc | 12 +- plugins/repository-gcs/build.gradle | 213 +++++- .../licenses/api-common-1.5.0.jar.sha1 | 1 + .../licenses/api-common-LICENSE.txt | 27 + .../licenses/api-common-NOTICE.txt | 0 .../licenses/commons-codec-1.10.jar.sha1 | 1 - .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/gax-1.25.0.jar.sha1 | 1 + .../repository-gcs/licenses/gax-LICENSE.txt | 27 + .../repository-gcs/licenses/gax-NOTICE.txt | 0 .../licenses/gax-httpjson-0.40.0.jar.sha1 | 1 + .../licenses/gax-httpjson-LICENSE.txt | 27 + .../licenses/gax-httpjson-NOTICE.txt | 0 .../licenses/google-api-client-LICENSE.txt | 201 ++++++ .../licenses/google-api-client-NOTICE.txt | 0 ...api-services-storage-v1-rev115-LICENSE.txt | 201 ++++++ ...-api-services-storage-v1-rev115-NOTICE.txt | 0 .../licenses/google-auth-LICENSE.txt | 28 + .../licenses/google-auth-NOTICE.txt | 0 ...le-auth-library-credentials-0.9.1.jar.sha1 | 1 + ...le-auth-library-oauth2-http-0.9.1.jar.sha1 | 1 + .../licenses/google-cloud-LICENSE.txt | 201 ++++++ .../licenses/google-cloud-NOTICE.txt | 0 .../google-cloud-core-1.28.0.jar.sha1 | 1 + .../google-cloud-core-http-1.28.0.jar.sha1 | 1 + .../google-cloud-storage-1.28.0.jar.sha1 | 1 + ...le-LICENSE.txt => google-http-LICENSE.txt} | 0 .../licenses/google-http-NOTICE.txt | 0 ...ogle-http-client-appengine-1.23.0.jar.sha1 | 1 + ...google-http-client-jackson-1.23.0.jar.sha1 | 1 + .../licenses/google-oauth-client-LICENSE.txt | 28 + .../licenses/google-oauth-client-NOTICE.txt | 0 .../licenses/grpc-context-1.9.0.jar.sha1 | 1 + ...c-LICENSE.txt => grpc-context-LICENSE.txt} | 0 .../licenses/grpc-context-NOTICE.txt | 0 .../licenses/guava-20.0.jar.sha1 | 1 + .../repository-gcs/licenses/guava-LICENSE.txt | 202 ++++++ .../repository-gcs/licenses/guava-NOTICE.txt | 0 .../licenses/httpclient-4.5.2.jar.sha1 | 1 - .../licenses/httpcore-4.4.5.jar.sha1 | 1 - .../licenses/jackson-core-asl-1.9.13.jar.sha1 | 1 + .../licenses/jackson-core-asl-LICENSE.txt | 202 ++++++ .../licenses/jackson-core-asl-NOTICE.txt | 0 .../licenses/old/commons-codec-LICENSE.txt | 202 ++++++ .../{ => old}/commons-codec-NOTICE.txt | 0 .../{ => old}/commons-logging-LICENSE.txt | 0 .../{ => old}/commons-logging-NOTICE.txt | 0 
.../licenses/old/google-LICENSE.txt | 201 ++++++ .../licenses/{ => old}/google-NOTICE.txt | 0 .../licenses/{ => old}/httpclient-LICENSE.txt | 0 .../licenses/{ => old}/httpclient-NOTICE.txt | 0 .../licenses/{ => old}/httpcore-LICENSE.txt | 0 .../licenses/{ => old}/httpcore-NOTICE.txt | 0 .../licenses/opencensus-LICENSE.txt | 202 ++++++ .../licenses/opencensus-NOTICE.txt | 0 .../licenses/opencensus-api-0.11.1.jar.sha1 | 1 + ...encensus-contrib-http-util-0.11.1.jar.sha1 | 1 + .../proto-google-common-protos-1.8.0.jar.sha1 | 1 + .../proto-google-common-protos-LICENSE.txt | 202 ++++++ .../proto-google-common-protos-NOTICE.txt | 0 .../licenses/threetenbp-1.3.6.jar.sha1 | 1 + .../licenses/threetenbp-LICENSE.txt | 31 + .../licenses/threetenbp-NOTICE.txt | 0 .../qa/google-cloud-storage/build.gradle | 8 +- .../gcs/GoogleCloudStorageTestServer.java | 346 +++++++--- .../gcs/GoogleCloudStorageBlobStore.java | 394 +++++------ .../gcs/GoogleCloudStorageClientSettings.java | 100 ++- .../gcs/GoogleCloudStoragePlugin.java | 78 +-- .../gcs/GoogleCloudStorageRepository.java | 5 +- .../gcs/GoogleCloudStorageService.java | 175 ++--- .../plugin-metadata/plugin-security.policy | 7 +- .../cloud/storage/StorageRpcOptionUtils.java | 54 ++ .../cloud/storage/StorageTestUtils.java | 37 ++ ...eCloudStorageBlobStoreRepositoryTests.java | 2 +- ...GoogleCloudStorageClientSettingsTests.java | 132 ++-- .../gcs/GoogleCloudStorageServiceTests.java | 104 ++- .../repositories/gcs/MockStorage.java | 627 ++++++++++++------ 77 files changed, 3398 insertions(+), 899 deletions(-) create mode 100644 plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/api-common-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/api-common-NOTICE.txt delete mode 100644 plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/gax-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-client-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-client-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-cloud-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 rename 
plugins/repository-gcs/licenses/{google-LICENSE.txt => google-http-LICENSE.txt} (100%) create mode 100644 plugins/repository-gcs/licenses/google-http-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 rename plugins/repository-gcs/licenses/{commons-codec-LICENSE.txt => grpc-context-LICENSE.txt} (100%) create mode 100644 plugins/repository-gcs/licenses/grpc-context-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/guava-20.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/guava-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/guava-NOTICE.txt delete mode 100644 plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt rename plugins/repository-gcs/licenses/{ => old}/commons-codec-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/commons-logging-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/commons-logging-NOTICE.txt (100%) create mode 100644 plugins/repository-gcs/licenses/old/google-LICENSE.txt rename plugins/repository-gcs/licenses/{ => old}/google-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpclient-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpclient-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpcore-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpcore-NOTICE.txt (100%) create mode 100644 plugins/repository-gcs/licenses/opencensus-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/threetenbp-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/threetenbp-NOTICE.txt create mode 100644 plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java create mode 100644 plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index a51200fb7fef0..8cf2bc0a73c92 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -84,11 +84,7 @@ A service account file looks like this: "private_key_id": "...", "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END 
PRIVATE KEY-----\n", "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com", - "client_id": "...", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://accounts.google.com/o/oauth2/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "..." + "client_id": "..." } ---- // NOTCONSOLE @@ -178,6 +174,12 @@ are marked as `Secure`. a custom name can be useful to authenticate your cluster when requests statistics are logged in the Google Cloud Platform. Default to `repository-gcs` +`project_id`:: + + The Google Cloud project id. This will be automatically inferred from the credentials file but + can be specified explicitly. For example, it can be used to switch between projects when the + same credentials are usable for both the production and the development projects. + [[repository-gcs-repository]] ==== Repository Settings diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index e164a8553f81f..07ef4b4be5e62 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -22,38 +22,207 @@ esplugin { classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' } -versions << [ - 'google': '1.23.0', -] - dependencies { - compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" - compile "com.google.api-client:google-api-client:${versions.google}" - compile "com.google.oauth-client:google-oauth-client:${versions.google}" - compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" - compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" - compile "commons-logging:commons-logging:${versions.commonslogging}" - compile "commons-codec:commons-codec:${versions.commonscodec}" - compile "com.google.http-client:google-http-client:${versions.google}" - compile "com.google.http-client:google-http-client-jackson2:${versions.google}" + compile 'com.google.cloud:google-cloud-storage:1.28.0' + compile 'com.google.cloud:google-cloud-core:1.28.0' + compile 'com.google.cloud:google-cloud-core-http:1.28.0' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' + compile 'com.google.auth:google-auth-library-credentials:0.9.1' + compile 'com.google.oauth-client:google-oauth-client:1.23.0' + compile 'com.google.http-client:google-http-client:1.23.0' + compile 'com.google.http-client:google-http-client-jackson:1.23.0' + compile 'com.google.http-client:google-http-client-jackson2:1.23.0' + compile 'com.google.http-client:google-http-client-appengine:1.23.0' + compile 'com.google.api-client:google-api-client:1.23.0' + compile 'com.google.api:gax:1.25.0' + compile 'com.google.api:gax-httpjson:0.40.0' + compile 'com.google.api:api-common:1.5.0' + compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' + compile 'com.google.guava:guava:20.0' + compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' + compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' + compile 'io.grpc:grpc-context:1.9.0' + compile 'io.opencensus:opencensus-api:0.11.1' + compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' + compile 'org.threeten:threetenbp:1.3.6' } dependencyLicenses { - mapping from: /google-.*/, to: 'google' + mapping from: /google-cloud-.*/, to: 'google-cloud' + mapping from: /google-auth-.*/, to: 'google-auth' + mapping from: /google-http-.*/, to: 'google-http' + mapping from: /opencensus.*/, to: 'opencensus' } thirdPartyAudit.excludes = [ + //
uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', // classes are missing - 'com.google.common.base.Splitter', - 'com.google.common.collect.Lists', - 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', + 'com.google.appengine.api.datastore.Blob', + 'com.google.appengine.api.datastore.DatastoreService', + 'com.google.appengine.api.datastore.DatastoreServiceFactory', + 'com.google.appengine.api.datastore.Entity', + 'com.google.appengine.api.datastore.Key', + 'com.google.appengine.api.datastore.KeyFactory', + 'com.google.appengine.api.datastore.PreparedQuery', + 'com.google.appengine.api.datastore.Query', + 'com.google.appengine.api.memcache.Expiration', + 'com.google.appengine.api.memcache.MemcacheService', + 'com.google.appengine.api.memcache.MemcacheServiceFactory', + 'com.google.appengine.api.urlfetch.FetchOptions$Builder', + 'com.google.appengine.api.urlfetch.FetchOptions', + 'com.google.appengine.api.urlfetch.HTTPHeader', + 'com.google.appengine.api.urlfetch.HTTPMethod', + 'com.google.appengine.api.urlfetch.HTTPRequest', + 'com.google.appengine.api.urlfetch.HTTPResponse', + 'com.google.appengine.api.urlfetch.URLFetchService', + 'com.google.appengine.api.urlfetch.URLFetchServiceFactory', + 'com.google.gson.Gson', + 'com.google.gson.GsonBuilder', + 'com.google.gson.TypeAdapter', + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonWriter', + 'com.google.iam.v1.Binding$Builder', + 'com.google.iam.v1.Binding', + 'com.google.iam.v1.Policy$Builder', + 'com.google.iam.v1.Policy', + 'com.google.protobuf.AbstractMessageLite$Builder', + 'com.google.protobuf.AbstractParser', + 'com.google.protobuf.Any$Builder', + 'com.google.protobuf.Any', + 'com.google.protobuf.AnyOrBuilder', + 'com.google.protobuf.AnyProto', + 'com.google.protobuf.Api$Builder', + 'com.google.protobuf.Api', + 'com.google.protobuf.ApiOrBuilder', + 'com.google.protobuf.ApiProto', + 'com.google.protobuf.ByteString', + 'com.google.protobuf.CodedInputStream', + 'com.google.protobuf.CodedOutputStream', + 'com.google.protobuf.DescriptorProtos', + 'com.google.protobuf.Descriptors$Descriptor', + 'com.google.protobuf.Descriptors$EnumDescriptor', + 'com.google.protobuf.Descriptors$EnumValueDescriptor', + 'com.google.protobuf.Descriptors$FieldDescriptor', + 'com.google.protobuf.Descriptors$FileDescriptor$InternalDescriptorAssigner', + 'com.google.protobuf.Descriptors$FileDescriptor', + 'com.google.protobuf.Descriptors$OneofDescriptor', + 'com.google.protobuf.Duration$Builder', + 'com.google.protobuf.Duration', + 'com.google.protobuf.DurationOrBuilder', + 'com.google.protobuf.DurationProto', + 'com.google.protobuf.EmptyProto', + 'com.google.protobuf.Enum$Builder', + 
'com.google.protobuf.Enum', + 'com.google.protobuf.EnumOrBuilder', + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.ExtensionRegistryLite', + 'com.google.protobuf.FloatValue$Builder', + 'com.google.protobuf.FloatValue', + 'com.google.protobuf.FloatValueOrBuilder', + 'com.google.protobuf.GeneratedMessage$GeneratedExtension', + 'com.google.protobuf.GeneratedMessage', + 'com.google.protobuf.GeneratedMessageV3$Builder', + 'com.google.protobuf.GeneratedMessageV3$BuilderParent', + 'com.google.protobuf.GeneratedMessageV3$FieldAccessorTable', + 'com.google.protobuf.GeneratedMessageV3', + 'com.google.protobuf.Internal$EnumLite', + 'com.google.protobuf.Internal$EnumLiteMap', + 'com.google.protobuf.Internal', + 'com.google.protobuf.InvalidProtocolBufferException', + 'com.google.protobuf.LazyStringArrayList', + 'com.google.protobuf.LazyStringList', + 'com.google.protobuf.MapEntry$Builder', + 'com.google.protobuf.MapEntry', + 'com.google.protobuf.MapField', + 'com.google.protobuf.Message', + 'com.google.protobuf.MessageOrBuilder', + 'com.google.protobuf.Parser', + 'com.google.protobuf.ProtocolMessageEnum', + 'com.google.protobuf.ProtocolStringList', + 'com.google.protobuf.RepeatedFieldBuilderV3', + 'com.google.protobuf.SingleFieldBuilderV3', + 'com.google.protobuf.Struct$Builder', + 'com.google.protobuf.Struct', + 'com.google.protobuf.StructOrBuilder', + 'com.google.protobuf.StructProto', + 'com.google.protobuf.Timestamp$Builder', + 'com.google.protobuf.Timestamp', + 'com.google.protobuf.TimestampProto', + 'com.google.protobuf.Type$Builder', + 'com.google.protobuf.Type', + 'com.google.protobuf.TypeOrBuilder', + 'com.google.protobuf.TypeProto', + 'com.google.protobuf.UInt32Value$Builder', + 'com.google.protobuf.UInt32Value', + 'com.google.protobuf.UInt32ValueOrBuilder', + 'com.google.protobuf.UnknownFieldSet$Builder', + 'com.google.protobuf.UnknownFieldSet', + 'com.google.protobuf.WireFormat$FieldType', + 'com.google.protobuf.WrappersProto', + 'com.google.protobuf.util.Timestamps', + 'org.apache.http.ConnectionReuseStrategy', + 'org.apache.http.Header', + 'org.apache.http.HttpEntity', + 'org.apache.http.HttpEntityEnclosingRequest', + 'org.apache.http.HttpHost', + 'org.apache.http.HttpRequest', + 'org.apache.http.HttpResponse', + 'org.apache.http.HttpVersion', + 'org.apache.http.RequestLine', + 'org.apache.http.StatusLine', + 'org.apache.http.client.AuthenticationHandler', + 'org.apache.http.client.HttpClient', + 'org.apache.http.client.HttpRequestRetryHandler', + 'org.apache.http.client.RedirectHandler', + 'org.apache.http.client.RequestDirector', + 'org.apache.http.client.UserTokenHandler', + 'org.apache.http.client.methods.HttpDelete', + 'org.apache.http.client.methods.HttpEntityEnclosingRequestBase', + 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.methods.HttpHead', + 'org.apache.http.client.methods.HttpOptions', + 'org.apache.http.client.methods.HttpPost', + 'org.apache.http.client.methods.HttpPut', + 'org.apache.http.client.methods.HttpRequestBase', + 'org.apache.http.client.methods.HttpTrace', + 'org.apache.http.conn.ClientConnectionManager', + 'org.apache.http.conn.ConnectionKeepAliveStrategy', + 'org.apache.http.conn.params.ConnManagerParams', + 'org.apache.http.conn.params.ConnPerRouteBean', + 'org.apache.http.conn.params.ConnRouteParams', + 'org.apache.http.conn.routing.HttpRoutePlanner', + 'org.apache.http.conn.scheme.PlainSocketFactory', + 'org.apache.http.conn.scheme.Scheme', + 'org.apache.http.conn.scheme.SchemeRegistry', + 
'org.apache.http.conn.ssl.SSLSocketFactory', + 'org.apache.http.conn.ssl.X509HostnameVerifier', + 'org.apache.http.entity.AbstractHttpEntity', + 'org.apache.http.impl.client.DefaultHttpClient', + 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler', + 'org.apache.http.impl.conn.ProxySelectorRoutePlanner', + 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager', + 'org.apache.http.message.BasicHttpResponse', + 'org.apache.http.params.BasicHttpParams', + 'org.apache.http.params.HttpConnectionParams', + 'org.apache.http.params.HttpParams', + 'org.apache.http.params.HttpProtocolParams', + 'org.apache.http.protocol.HttpContext', + 'org.apache.http.protocol.HttpProcessor', + 'org.apache.http.protocol.HttpRequestExecutor' ] check { // also execute the QA tests when testing the plugin dependsOn 'qa:google-cloud-storage:check' -} \ No newline at end of file +} diff --git a/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 new file mode 100644 index 0000000000000..64435356e5eaf --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 @@ -0,0 +1 @@ +7e537338d40a57ad469239acb6d828fa544fb52b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/api-common-LICENSE.txt b/plugins/repository-gcs/licenses/api-common-LICENSE.txt new file mode 100644 index 0000000000000..6d16b6578a2f0 --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/api-common-NOTICE.txt b/plugins/repository-gcs/licenses/api-common-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 deleted file mode 100644 index 3fe8682a1b0f9..0000000000000 --- a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 new file mode 100644 index 0000000000000..594177047c140 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 @@ -0,0 +1 @@ +36ab73c0b5d4a67447eb89a3174cc76ced150bd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-LICENSE.txt b/plugins/repository-gcs/licenses/gax-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/gax-NOTICE.txt b/plugins/repository-gcs/licenses/gax-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 new file mode 100644 index 0000000000000..c251ea1dd956c --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 @@ -0,0 +1 @@ +cb4bafbfd45b9d24efbb6138a31e37918fac015f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt b/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-LICENSE.txt b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/google-auth-NOTICE.txt b/plugins/repository-gcs/licenses/google-auth-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..0922a53d2e356 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 @@ -0,0 +1 @@ +25e0f45f3b3d1b4fccc8944845e51a7a4f359652 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..100a44c187218 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 @@ -0,0 +1 @@ +c0fe3a39b0f28d59de1986b3c50f018cd7cb9ec2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt b/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..071533f227839 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 @@ -0,0 +1 @@ +c0e88c78ce17c92d76bf46345faf3fa68833b216 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..fed3fc257c32c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 @@ -0,0 +1 @@ +7b4559a9513abd98da50958c56a10f8ae00cb0f7 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..f49152ea05646 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 @@ -0,0 +1 @@ +226019ae816b42c59f1b06999aeeb73722b87200 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-LICENSE.txt b/plugins/repository-gcs/licenses/google-http-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-LICENSE.txt rename to plugins/repository-gcs/licenses/google-http-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/google-http-NOTICE.txt b/plugins/repository-gcs/licenses/google-http-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..823c3a85089a5 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 @@ -0,0 +1 @@ +0eda0d0f758c1cc525866e52e1226c4eb579d130 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..85ba0ab798d05 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 @@ -0,0 +1 @@ +a72ea3a197937ef63a893e73df312dac0d813663 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..02bac0e492074 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 @@ -0,0 +1 @@ +28b0836f48c9705abf73829bbc536dba29a1329a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/grpc-context-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-LICENSE.txt rename to plugins/repository-gcs/licenses/grpc-context-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/grpc-context-NOTICE.txt b/plugins/repository-gcs/licenses/grpc-context-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 new file mode 100644 index 0000000000000..7b6ae09060b29 --- /dev/null +++ b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 @@ -0,0 +1 @@ +89507701249388e1ed5ddcf8c41f4ce1be7831ef \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/guava-LICENSE.txt b/plugins/repository-gcs/licenses/guava-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/guava-NOTICE.txt b/plugins/repository-gcs/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 deleted file mode 100644 index 6937112a09fb6..0000000000000 --- a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 deleted file mode 100644 index 581726601745b..0000000000000 --- a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 new file mode 100644 index 0000000000000..c5016bf828d60 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 @@ -0,0 +1 @@ +3c304d70f42f832e0a86d45bd437f692129299a4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-LICENSE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/old/google-LICENSE.txt b/plugins/repository-gcs/licenses/old/google-LICENSE.txt new file mode 100644 index 0000000000000..980a15ac24eeb --- /dev/null +++ b/plugins/repository-gcs/licenses/old/google-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-NOTICE.txt b/plugins/repository-gcs/licenses/old/google-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-NOTICE.txt rename to plugins/repository-gcs/licenses/old/google-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/opencensus-LICENSE.txt b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
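The hunks that follow pin each newly added dependency jar to a SHA-1 checksum (opencensus-api-0.11.1.jar.sha1 and friends), which the build compares against the jars actually resolved. As a rough illustrative sketch only, not part of this patch and with a hypothetical jar path, the 40-character string such a *.jar.sha1 file holds can be computed with plain JDK classes:

import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class Sha1Sum {
    public static void main(String[] args) throws Exception {
        // Hypothetical path: point this at the dependency jar being pinned
        final byte[] jar = Files.readAllBytes(Paths.get("opencensus-api-0.11.1.jar"));
        final byte[] digest = MessageDigest.getInstance("SHA-1").digest(jar);
        final StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b)); // lower-case hex, one pair per byte
        }
        // A *.jar.sha1 file contains exactly this 40-character string, no trailing newline
        System.out.println(hex);
    }
}

Bumping a dependency version therefore also means replacing the matching .sha1 file, which is why this patch adds one per new jar.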
diff --git a/plugins/repository-gcs/licenses/opencensus-NOTICE.txt b/plugins/repository-gcs/licenses/opencensus-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..61d8e3b148144 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 @@ -0,0 +1 @@ +54689fbf750a7f26e34fa1f1f96b883c53f51486 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..c0b04f0f8ccce --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 @@ -0,0 +1 @@ +82e572b41e81ecf58d0d1e9a3953a05aa8f9c84b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 new file mode 100644 index 0000000000000..0a2dee4447e92 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 @@ -0,0 +1 @@ +b3282312ba82536fc9a7778cabfde149a875e877 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 new file mode 100644 index 0000000000000..65c16fed4a07b --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 @@ -0,0 +1 @@ +89dcc04a7e028c3c963413a71f950703cf51f057 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt new file mode 100644 index 0000000000000..fcdfc8f0d0774 --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos + * + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * * Neither the name of JSR-310 nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ diff --git a/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt b/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index afd49b9f4dc73..34ec92a354277 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -69,7 +69,6 @@ task googleCloudStorageFixture(type: AntFixture) { /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ task createServiceAccountFile() { - dependsOn googleCloudStorageFixture doLast { KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") keyPairGenerator.initialize(1024) @@ -83,11 +82,7 @@ task createServiceAccountFile() { ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + - ' "client_id": "123456789101112130594",\n' + - " \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + - " \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + - ' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + - ' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + ' "client_id": "123456789101112130594"\n' + '}', 'UTF-8') } } @@ -109,6 +104,7 @@ integTestCluster { dependsOn createServiceAccountFile, googleCloudStorageFixture /* Use a closure on the string to delay evaluation until tests are executed */ setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.token_uri', "http://${ -> 
googleCloudStorageFixture.addressAndPort }/o/oauth2/token" } else { println "Using an external service to test the repository-gcs plugin" } diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 2330e230f4505..a9832ae318de4 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -31,13 +31,18 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; @@ -52,7 +57,7 @@ */ public class GoogleCloudStorageTestServer { - private static byte[] EMPTY_BYTE = new byte[0]; + private static final byte[] EMPTY_BYTE = new byte[0]; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); @@ -63,13 +68,6 @@ public class GoogleCloudStorageTestServer { /** Server endpoint **/ private final String endpoint; - /** - * Creates a {@link GoogleCloudStorageTestServer} with the default endpoint - */ - GoogleCloudStorageTestServer() { - this("https://www.googleapis.com"); - } - /** * Creates a {@link GoogleCloudStorageTestServer} with a custom endpoint */ @@ -87,29 +85,6 @@ public String getEndpoint() { return endpoint; } - /** - * Returns a Google Cloud Storage response for the given request - * - * @param method the HTTP method of the request - * @param url the HTTP URL of the request - * @param headers the HTTP headers of the request - * @param body the HTTP request body - * @return a {@link Response} - * - * @throws IOException if something goes wrong - */ - public Response handle(final String method, - final String url, - final Map> headers, - byte[] body) throws IOException { - - final int questionMark = url.indexOf('?'); - if (questionMark == -1) { - return handle(method, url, null, headers, body); - } - return handle(method, url.substring(0, questionMark), url.substring(questionMark + 1), headers, body); - } - /** * Returns a Google Cloud Storage response for the given request * @@ -165,7 +140,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/buckets/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}", (params, headers, body) -> { - String name = params.get("bucket"); + final String name = params.get("bucket"); if (Strings.hasText(name) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing"); } @@ -181,7 +156,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = 
params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -191,7 +166,7 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "bucket not found"); } - for (Map.Entry object : bucket.objects.entrySet()) { + for (final Map.Entry object : bucket.objects.entrySet()) { if (object.getKey().equals(objectName)) { return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectName, object.getValue())); } @@ -203,7 +178,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/delete handlers.insert("DELETE " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -224,25 +199,149 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/insert handlers.insert("POST " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - if ("resumable".equals(params.get("uploadType")) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable"); - } - - final String objectName = params.get("name"); - if (Strings.hasText(objectName) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); - } - - final Bucket bucket = buckets.get(params.get("bucket")); - if (bucket == null) { - return newError(RestStatus.NOT_FOUND, "bucket not found"); - } - - if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { - String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; - return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + final String uploadType = params.get("uploadType"); + if ("resumable".equals(uploadType)) { + final String objectName = params.get("name"); + if (Strings.hasText(objectName) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + final Bucket bucket = buckets.get(params.get("bucket")); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + if (bucket.objects.putIfAbsent(objectName, EMPTY_BYTE) == null) { + final String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + + objectName; + return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + } else { + return newError(RestStatus.CONFLICT, "object already exist"); + } + } else if ("multipart".equals(uploadType)) { + /* + * A multipart/related request body looks like this (note the binary dump inside a text blob! 
nice!): + * --__END_OF_PART__ + * Content-Length: 135 + * Content-Type: application/json; charset=UTF-8 + * content-transfer-encoding: binary + * + * {"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==", + * "name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} + * --__END_OF_PART__ + * content-transfer-encoding: binary + * + * KEwE3bU4TuyetBgQIghmUw + * --__END_OF_PART__-- + */ + String boundary = "__END_OF_PART__"; + // Determine the multipart boundary + final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); + if (contentTypes != null) { + final String contentType = contentTypes.get(0); + if ((contentType != null) && contentType.contains("multipart/related; boundary=")) { + boundary = contentType.replace("multipart/related; boundary=", ""); + } + } + InputStream inputStreamBody = new ByteArrayInputStream(body); + final List contentEncodings = headers.getOrDefault("Content-Encoding", headers.get("Content-encoding")); + if (contentEncodings != null) { + if (contentEncodings.stream().anyMatch(x -> "gzip".equalsIgnoreCase(x))) { + inputStreamBody = new GZIPInputStream(inputStreamBody); + } + } + // Read line by line both parts of the multipart. Decoding headers as + // ISO_8859_1 is safe. + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.ISO_8859_1))) { + String line; + // read first part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Does not start with the part delimiter."); + } + final Map> firstPartHeaders = new HashMap<>(); + // Reads the first part's headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + firstPartHeaders.put(header[0], singletonList(header[1])); + } + } + final List firstPartContentTypes = firstPartHeaders.getOrDefault("Content-Type", + firstPartHeaders.get("Content-type")); + if ((firstPartContentTypes == null) + || (firstPartContentTypes.stream().noneMatch(x -> x.contains("application/json")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Metadata part expected to have the \"application/json\" content type."); + } + // read metadata part, a single line + line = reader.readLine(); + final byte[] metadata = line.getBytes(StandardCharsets.ISO_8859_1); + if ((firstPartContentTypes != null) && (firstPartContentTypes.stream().anyMatch((x -> x.contains("charset=utf-8"))))) { + // decode as utf-8 + line = new String(metadata, StandardCharsets.UTF_8); + } + final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); + objectNameMatcher.find(); + final String objectName = objectNameMatcher.group(1); + final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); + bucketNameMatcher.find(); + final String bucketName = bucketNameMatcher.group(1); + // read second part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Second part does not start with delimiter. 
" + + "Is the metadata multi-line?"); + } + final Map> secondPartHeaders = new HashMap<>(); + // Reads the second part's headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + secondPartHeaders.put(header[0], singletonList(header[1])); + } + } + final List secondPartTransferEncoding = secondPartHeaders.getOrDefault("Content-Transfer-Encoding", + secondPartHeaders.get("content-transfer-encoding")); + if ((secondPartTransferEncoding == null) + || (secondPartTransferEncoding.stream().noneMatch(x -> x.contains("binary")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Data part expected to have the \"binary\" content transfer encoding."); + } + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int c; + while ((c = reader.read()) != -1) { + // one char to one byte, because of the ISO_8859_1 encoding + baos.write(c); + } + final byte[] temp = baos.toByteArray(); + final byte[] trailingEnding = ("\r\n--" + boundary + "--\r\n").getBytes(StandardCharsets.ISO_8859_1); + // check trailing + for (int i = trailingEnding.length - 1; i >= 0; i--) { + if (trailingEnding[i] != temp[(temp.length - trailingEnding.length) + i]) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Error parsing multipart request."); + } + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + final byte[] objectData = Arrays.copyOf(temp, temp.length - trailingEnding.length); + if ((objectName != null) && (bucketName != null) && (objectData != null)) { + bucket.objects.put(objectName, objectData); + return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); + } else { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); + } + } } else { - return newError(RestStatus.CONFLICT, "object already exist"); + return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable or multipart"); } }); @@ -250,7 +349,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload handlers.insert("PUT " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - String objectId = params.get("upload_id"); + final String objectId = params.get("upload_id"); if (Strings.hasText(objectId) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); } @@ -268,38 +367,46 @@ private static PathTrie defaultHandlers(final String endpoint, f return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectId, body)); }); - // Copy Object + // Rewrite or Copy Object // + // https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite // https://cloud.google.com/storage/docs/json_api/v1/objects/copy - handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (params, headers, body)-> { - String source = params.get("src"); - if (Strings.hasText(source) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); - } - - final Bucket srcBucket = buckets.get(params.get("srcBucket")); - if (srcBucket == null) { - return newError(RestStatus.NOT_FOUND, "source bucket not found"); - } - - String dest = params.get("dest"); - if (Strings.hasText(dest) == 
false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); - } - - final Bucket destBucket = buckets.get(params.get("destBucket")); - if (destBucket == null) { - return newError(RestStatus.NOT_FOUND, "destination bucket not found"); - } - - final byte[] sourceBytes = srcBucket.objects.get(source); - if (sourceBytes == null) { - return newError(RestStatus.NOT_FOUND, "source object not found"); - } - - destBucket.objects.put(dest, sourceBytes); - return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); - }); + handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/{action}/b/{destBucket}/o/{dest}", + (params, headers, body) -> { + final String action = params.get("action"); + if ((action.equals("rewriteTo") == false) && (action.equals("copyTo") == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Action not implemented. None of \"rewriteTo\" or \"copyTo\"."); + } + final String source = params.get("src"); + if (Strings.hasText(source) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); + } + final Bucket srcBucket = buckets.get(params.get("srcBucket")); + if (srcBucket == null) { + return newError(RestStatus.NOT_FOUND, "source bucket not found"); + } + final String dest = params.get("dest"); + if (Strings.hasText(dest) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); + } + final Bucket destBucket = buckets.get(params.get("destBucket")); + if (destBucket == null) { + return newError(RestStatus.NOT_FOUND, "destination bucket not found"); + } + final byte[] sourceBytes = srcBucket.objects.get(source); + if (sourceBytes == null) { + return newError(RestStatus.NOT_FOUND, "source object not found"); + } + destBucket.objects.put(dest, sourceBytes); + if (action.equals("rewriteTo")) { + final XContentBuilder respBuilder = jsonBuilder(); + buildRewriteResponse(respBuilder, destBucket.name, dest, sourceBytes.length); + return newResponse(RestStatus.OK, emptyMap(), respBuilder); + } else { + assert action.equals("copyTo"); + return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); + } + }); // List Objects // @@ -317,8 +424,8 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.startArray("items"); final String prefixParam = params.get("prefix"); - for (Map.Entry object : bucket.objects.entrySet()) { - if (prefixParam != null && object.getKey().startsWith(prefixParam) == false) { + for (final Map.Entry object : bucket.objects.entrySet()) { + if ((prefixParam != null) && (object.getKey().startsWith(prefixParam) == false)) { continue; } buildObjectResource(builder, bucket.name, object.getKey(), object.getValue()); @@ -333,7 +440,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/request-body handlers.insert("GET " + endpoint + "/download/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String object = params.get("object"); + final String object = params.get("object"); if (Strings.hasText(object) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object id is missing"); } @@ -353,7 +460,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // Batch // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch - handlers.insert("POST " + endpoint + "/batch", (params, headers, body) -> { + handlers.insert("POST 
" + endpoint + "/batch/storage/v1", (params, headers, body) -> { final List batchedResponses = new ArrayList<>(); // A batch request body looks like this: @@ -385,7 +492,7 @@ private static PathTrie defaultHandlers(final String endpoint, f final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); if (contentTypes != null) { final String contentType = contentTypes.get(0); - if (contentType != null && contentType.contains("multipart/mixed; boundary=")) { + if ((contentType != null) && contentType.contains("multipart/mixed; boundary=")) { boundary = contentType.replace("multipart/mixed; boundary=", ""); } } @@ -398,25 +505,25 @@ private static PathTrie defaultHandlers(final String endpoint, f while ((line = reader.readLine()) != null) { // Start of a batched request if (line.equals("--" + boundary)) { - Map> batchedHeaders = new HashMap<>(); + final Map> batchedHeaders = new HashMap<>(); // Reads the headers, if any while ((line = reader.readLine()) != null) { - if (line.equals("\r\n") || line.length() == 0) { + if (line.equals("\r\n") || (line.length() == 0)) { // end of headers break; } else { - String[] header = line.split(":", 2); + final String[] header = line.split(":", 2); batchedHeaders.put(header[0], singletonList(header[1])); } } // Reads the method and URL line = reader.readLine(); - String batchedUrl = line.substring(0, line.lastIndexOf(' ')); + final String batchedUrl = line.substring(0, line.lastIndexOf(' ')); final Map batchedParams = new HashMap<>(); - int questionMark = batchedUrl.indexOf('?'); + final int questionMark = batchedUrl.indexOf('?'); if (questionMark != -1) { RestUtils.decodeQueryString(batchedUrl.substring(questionMark + 1), 0, batchedParams); } @@ -424,16 +531,16 @@ private static PathTrie defaultHandlers(final String endpoint, f // Reads the body line = reader.readLine(); byte[] batchedBody = new byte[0]; - if (line != null || line.startsWith("--" + boundary) == false) { + if ((line != null) || (line.startsWith("--" + boundary) == false)) { batchedBody = line.getBytes(StandardCharsets.UTF_8); } // Executes the batched request - RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); + final RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); if (handler != null) { try { batchedResponses.add(handler.execute(batchedParams, batchedHeaders, batchedBody)); - } catch (IOException e) { + } catch (final IOException e) { batchedResponses.add(newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } } @@ -442,11 +549,11 @@ private static PathTrie defaultHandlers(final String endpoint, f } // Now we can build the response - String sep = "--"; - String line = "\r\n"; + final String sep = "--"; + final String line = "\r\n"; - StringBuilder builder = new StringBuilder(); - for (Response response : batchedResponses) { + final StringBuilder builder = new StringBuilder(); + for (final Response response : batchedResponses) { builder.append(sep).append(boundary).append(line); builder.append("Content-Type: application/http").append(line); builder.append(line); @@ -465,7 +572,7 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.append(line); builder.append(sep).append(boundary).append(sep); - byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); + final byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); return new Response(RestStatus.OK, emptyMap(), "multipart/mixed; boundary=" + boundary, content); }); @@ -525,7 +632,7 @@ private static Response 
newResponse(final RestStatus status, final Map { - try { - Bucket bucket = client.buckets().get(bucketName).execute(); - if (bucket != null) { - return Strings.hasText(bucket.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; - } - return false; - }); - } catch (IOException e) { + final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); + return bucket != null; + } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); } } /** - * List all blobs in the bucket + * List blobs in the bucket under the specified path. The path root is removed. * - * @param path base path of the blobs to list + * @param path + * base path of the blobs to list * @return a map of blob names and their metadata */ Map listBlobs(String path) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, path, path)); + return listBlobsByPrefix(path, ""); } /** * List all blobs in the bucket which have a prefix * - * @param path base path of the blobs to list - * @param prefix prefix of the blobs to list - * @return a map of blob names and their metadata + * @param path + * base path of the blobs to list. This path is removed from the + * names of the blobs returned. + * @param prefix + * prefix of the blobs to list. + * @return a map of blob names and their metadata. */ Map listBlobsByPrefix(String path, String prefix) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, buildKey(path, prefix), path)); - } - - /** - * Lists all blobs in a given bucket - * - * @param bucketName name of the bucket - * @param path base path of the blobs to list - * @param pathToRemove if true, this path part is removed from blob name - * @return a map of blob names and their metadata - */ - private Map listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException { - return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS) - .map(new BlobMetaDataConverter(pathToRemove)) - .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity())); + final String pathPrefix = buildKey(path, prefix); + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + SocketAccess.doPrivilegedVoidIOException(() -> { + storage.get(bucket).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { + assert blob.getName().startsWith(path); + final String suffixName = blob.getName().substring(path.length()); + mapBuilder.put(suffixName, new PlainBlobMetaData(suffixName, blob.getSize())); + }); + }); + return mapBuilder.immutableMap(); } /** @@ -161,19 +143,9 @@ private Map listBlobsByPath(String bucketName, String path * @return true if the blob exists, false otherwise */ boolean blobExists(String blobName) throws IOException { - try { - StorageObject blob = SocketAccess.doPrivilegedIOException(() -> client.objects().get(bucket, blobName).execute()); - if (blob != null) { - return Strings.hasText(blob.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; - } - return false; + final BlobId blobId = BlobId.of(bucket, blobName); + final Blob blob = 
SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + return blob != null; } /** @@ -183,18 +155,29 @@ boolean blobExists(String blobName) throws IOException { * @return an InputStream */ InputStream readBlob(String blobName) throws IOException { - try { - return SocketAccess.doPrivilegedIOException(() -> { - Storage.Objects.Get object = client.objects().get(bucket, blobName); - return object.executeMediaAsInputStream(); - }); - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - throw new NoSuchFileException(e.getMessage()); - } - throw e; + final BlobId blobId = BlobId.of(bucket, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + if (blob == null) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } + final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); + return Channels.newInputStream(new ReadableByteChannel() { + @SuppressForbidden(reason = "Channel is based on a socket not a file") + @Override + public int read(ByteBuffer dst) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> readChannel.read(dst)); + } + + @Override + public boolean isOpen() { + return readChannel.isOpen(); + } + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(readChannel::close); + } + }); } /** @@ -204,14 +187,58 @@ InputStream readBlob(String blobName) throws IOException { * @param blobSize expected size of the blob to be written */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - SocketAccess.doPrivilegedVoidIOException(() -> { - InputStreamContent stream = new InputStreamContent(null, inputStream); - stream.setLength(blobSize); + final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); + if (blobSize > LARGE_BLOB_THRESHOLD_BYTE_SIZE) { + writeBlobResumable(blobInfo, inputStream); + } else { + writeBlobMultipart(blobInfo, inputStream, blobSize); + } + } - Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream); - insert.setName(blobName); - insert.execute(); - }); + /** + * Uploads a blob using the "resumable upload" method (multiple requests, which + * can be independently retried in case of failure, see + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + * + * @param blobInfo the info for the blob to be uploaded + * @param inputStream the stream containing the blob data + */ + private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); + Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); + } + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } + + @SuppressForbidden(reason = "Channel is based on a socket not a file") + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); + } + + /** + * Uploads a blob using the "multipart upload" method (a single + * 'multipart/related' request containing both data and metadata. 
The request is + * gzipped), see: + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload + * + * @param blobInfo the info for the blob to be uploaded + * @param inputStream the stream containing the blob data + * @param blobSize the size of the blob in bytes + */ + private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long blobSize) throws IOException { + assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; + final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); + Streams.copy(inputStream, baos); + SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); } /** @@ -220,10 +247,11 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { - if (!blobExists(blobName)) { + final BlobId blobId = BlobId.of(bucket, blobName); + final boolean deleted = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobId)); + if (deleted == false) { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } - SocketAccess.doPrivilegedIOException(() -> client.objects().delete(bucket, blobName).execute()); } /** @@ -232,7 +260,7 @@ void deleteBlob(String blobName) throws IOException { * @param prefix prefix of the blobs to delete */ void deleteBlobsByPrefix(String prefix) throws IOException { - deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet()); + deleteBlobs(listBlobsByPrefix("", prefix).keySet()); } /** @@ -241,163 +269,55 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the blobs to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames == null || blobNames.isEmpty()) { + if (blobNames.isEmpty()) { return; } - + // for a single op submit a simple delete instead of a batch of size 1 if (blobNames.size() == 1) { deleteBlob(blobNames.iterator().next()); return; } - final List deletions = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size())); - final Iterator blobs = blobNames.iterator(); - - SocketAccess.doPrivilegedVoidIOException(() -> { - while (blobs.hasNext()) { - // Create a delete request for each blob to delete - deletions.add(client.objects().delete(bucket, blobs.next())); - - if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) { - try { - // Deletions are executed using a batch request - BatchRequest batch = client.batch(); - - // Used to track successful deletions - CountDown countDown = new CountDown(deletions.size()); - - for (Storage.Objects.Delete delete : deletions) { - // Queue the delete request in batch - delete.queue(batch, new JsonBatchCallback() { - @Override - public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { - logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(), e - .getMessage()); - } - - @Override - public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException { - countDown.countDown(); - } - }); - } - - batch.execute(); - - if (countDown.isCountedDown() == false) { - throw new IOException("Failed to delete all [" + deletions.size() + "] blobs"); - } - } finally { - deletions.clear(); - } - } + final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); + final List deletedStatuses = 
SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); + assert blobIdsToDelete.size() == deletedStatuses.size(); + boolean failed = false; + for (int i = 0; i < blobIdsToDelete.size(); i++) { + if (deletedStatuses.get(i) == false) { + logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucket); + failed = true; } - }); + } + if (failed) { + throw new IOException("Failed to delete all [" + blobIdsToDelete.size() + "] blobs"); + } } /** * Moves a blob within the same bucket * * @param sourceBlob name of the blob to move - * @param targetBlob new name of the blob in the target bucket + * @param targetBlob new name of the blob in the same bucket */ - void moveBlob(String sourceBlob, String targetBlob) throws IOException { - SocketAccess.doPrivilegedIOException(() -> { + void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { + final BlobId sourceBlobId = BlobId.of(bucket, sourceBlobName); + final BlobId targetBlobId = BlobId.of(bucket, targetBlobName); + final CopyRequest request = CopyRequest.newBuilder() + .setSource(sourceBlobId) + .setTarget(targetBlobId) + .build(); + SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete - client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute(); - client.objects().delete(bucket, sourceBlob).execute(); - return null; + storage.copy(request).getResult(); + final boolean deleted = storage.delete(sourceBlobId); + if (deleted == false) { + throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]"); + } }); } - private String buildKey(String keyPath, String s) { + private static String buildKey(String keyPath, String s) { assert s != null; return keyPath + s; } - /** - * Converts a {@link StorageObject} to a {@link PlainBlobMetaData} - */ - class BlobMetaDataConverter implements Function { - - private final String pathToRemove; - - BlobMetaDataConverter(String pathToRemove) { - this.pathToRemove = pathToRemove; - } - - @Override - public PlainBlobMetaData apply(StorageObject storageObject) { - String blobName = storageObject.getName(); - if (Strings.hasLength(pathToRemove)) { - blobName = blobName.substring(pathToRemove.length()); - } - return new PlainBlobMetaData(blobName, storageObject.getSize().longValue()); - } - } - - /** - * Spliterator can be used to list storage objects stored in a bucket. 
- */ - static class StorageObjectsSpliterator implements Spliterator { - - private final Storage.Objects.List list; - - StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - list = SocketAccess.doPrivilegedIOException(() -> client.objects().list(bucketName)); - list.setMaxResults(pageSize); - if (prefix != null) { - list.setPrefix(prefix); - } - } - - @Override - public boolean tryAdvance(Consumer action) { - try { - // Retrieves the next page of items - Objects objects = SocketAccess.doPrivilegedIOException(list::execute); - - if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) { - return false; - } - - // Consumes all the items - objects.getItems().forEach(action::accept); - - // Sets the page token of the next page, - // null indicates that all items have been consumed - String next = objects.getNextPageToken(); - if (next != null) { - list.setPageToken(next); - return true; - } - - return false; - } catch (Exception e) { - throw new BlobStoreException("Exception while listing objects", e); - } - } - - @Override - public Spliterator trySplit() { - return null; - } - - @Override - public long estimateSize() { - return Long.MAX_VALUE; - } - - @Override - public int characteristics() { - return 0; - } - } - - /** - * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket. - */ - static Stream blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false); - } - } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 03295c18c8ae6..99df38413326c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -18,8 +18,10 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -28,10 +30,12 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.net.URI; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.settings.Setting.timeSetting; @@ -44,11 +48,19 @@ public class GoogleCloudStorageClientSettings { /** A json Service Account file loaded from secure settings. */ static final Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(PREFIX, "credentials_file", - key -> SecureSetting.secureFile(key, null)); + key -> SecureSetting.secureFile(key, null)); /** An override for the Storage endpoint to connect to. 
*/ static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", - key -> new Setting<>(key, "", s -> s, Setting.Property.NodeScope)); + key -> Setting.simpleString(key, Setting.Property.NodeScope)); + + /** An override for the Google Project ID. */ + static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", + key -> Setting.simpleString(key, Setting.Property.NodeScope)); + + /** An override for the Token Server URI in the oauth flow. */ + static final Setting.AffixSetting TOKEN_URI_SETTING = Setting.affixKeySetting(PREFIX, "token_uri", + key -> new Setting<>(key, "", URI::create, Setting.Property.NodeScope)); /** * The timeout to establish a connection. A value of {@code -1} corresponds to an infinite timeout. A value of {@code 0} @@ -64,45 +76,59 @@ public class GoogleCloudStorageClientSettings { static final Setting.AffixSetting READ_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "read_timeout", key -> timeSetting(key, TimeValue.ZERO, TimeValue.MINUS_ONE, Setting.Property.NodeScope)); - /** Name used by the client when it uses the Google Cloud JSON API. **/ + /** Name used by the client when it uses the Google Cloud JSON API. */ static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", - key -> new Setting<>(key, "repository-gcs", s -> s, Setting.Property.NodeScope)); + key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated)); - /** The credentials used by the client to connect to the Storage endpoint **/ - private final GoogleCredential credential; + /** The credentials used by the client to connect to the Storage endpoint. */ + private final ServiceAccountCredentials credential; - /** The Storage root URL the client should talk to, or empty string to use the default. **/ + /** The Storage endpoint URL the client should talk to. Null value sets the default. */ private final String endpoint; - /** The timeout to establish a connection **/ + /** The Google project ID overriding the default way to infer it. Null value sets the default. */ + private final String projectId; + + /** The timeout to establish a connection */ private final TimeValue connectTimeout; - /** The timeout to read data from an established connection **/ + /** The timeout to read data from an established connection */ private final TimeValue readTimeout; - /** The Storage client application name **/ + /** The Storage client application name */ private final String applicationName; - GoogleCloudStorageClientSettings(final GoogleCredential credential, + /** The token server URI. This leases access tokens in the oauth flow. */ + private final URI tokenUri; + + GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, final String endpoint, + final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, - final String applicationName) { + final String applicationName, + final URI tokenUri) { this.credential = credential; this.endpoint = endpoint; + this.projectId = projectId; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; this.applicationName = applicationName; + this.tokenUri = tokenUri; } - public GoogleCredential getCredential() { + public ServiceAccountCredentials getCredential() { return credential; } - public String getEndpoint() { + public String getHost() { return endpoint; } + public String getProjectId() { + return Strings.hasLength(projectId) ? 
projectId : (credential != null ? credential.getProjectId() : null); + } + public TimeValue getConnectTimeout() { return connectTimeout; } @@ -115,9 +141,13 @@ public String getApplicationName() { return applicationName; } + public URI getTokenUri() { + return tokenUri; + } + public static Map load(final Settings settings) { final Map clients = new HashMap<>(); - for (String clientName: settings.getGroups(PREFIX).keySet()) { + for (final String clientName: settings.getGroups(PREFIX).keySet()) { clients.put(clientName, getClientSettings(settings, clientName)); } if (clients.containsKey("default") == false) { @@ -132,22 +162,27 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting return new GoogleCloudStorageClientSettings( loadCredential(settings, clientName), getConfigValue(settings, clientName, ENDPOINT_SETTING), + getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), - getConfigValue(settings, clientName, APPLICATION_NAME_SETTING) + getConfigValue(settings, clientName, APPLICATION_NAME_SETTING), + getConfigValue(settings, clientName, TOKEN_URI_SETTING) ); } /** - * Loads the service account file corresponding to a given client name. If no file is defined for the client, - * a {@code null} credential is returned. + * Loads the service account file corresponding to a given client name. If no + * file is defined for the client, a {@code null} credential is returned. * - * @param settings the {@link Settings} - * @param clientName the client name + * @param settings + * the {@link Settings} + * @param clientName + * the client name * - * @return the {@link GoogleCredential} to use for the given client, {@code null} if no service account is defined. + * @return the {@link ServiceAccountCredentials} to use for the given client, + * {@code null} if no service account is defined. 
*/ - static GoogleCredential loadCredential(final Settings settings, final String clientName) { + static ServiceAccountCredentials loadCredential(final Settings settings, final String clientName) { try { if (CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).exists(settings) == false) { // explicitly returning null here so that the default credential @@ -155,19 +190,22 @@ static GoogleCredential loadCredential(final Settings settings, final String cli return null; } try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { - GoogleCredential credential = GoogleCredential.fromStream(credStream); - if (credential.createScopedRequired()) { - credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - } - return credential; + final Collection scopes = Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL); + return SocketAccess.doPrivilegedIOException(() -> { + final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(credStream); + if (credentials.createScopedRequired()) { + return (ServiceAccountCredentials) credentials.createScoped(scopes); + } + return credentials; + }); } - } catch (IOException e) { + } catch (final IOException e) { throw new UncheckedIOException(e); } } private static T getConfigValue(final Settings settings, final String clientName, final Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index ef24cd959e55b..1d2d70584adf9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,21 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.auth.oauth2.TokenRequest; -import com.google.api.client.auth.oauth2.TokenResponse; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.json.GenericJson; -import com.google.api.client.json.webtoken.JsonWebSignature; -import com.google.api.client.json.webtoken.JsonWebToken; -import com.google.api.client.util.ClassInfo; -import com.google.api.client.util.Data; -import com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.Objects; -import com.google.api.services.storage.model.StorageObject; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -42,8 +27,6 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -51,63 +34,6 @@ public class GoogleCloudStoragePlugin extends Plugin implements 
RepositoryPlugin { - static { - /* - * Google HTTP client changes access levels because its silly and we - * can't allow that on any old stack stack so we pull it here, up front, - * so we can cleanly check the permissions for it. Without this changing - * the permission can fail if any part of core is on the stack because - * our plugin permissions don't allow core to "reach through" plugins to - * change the permission. Because that'd be silly. - */ - SpecialPermission.check(); - AccessController.doPrivileged((PrivilegedAction) () -> { - // ClassInfo put in cache all the fields of a given class - // that are annoted with @Key; at the same time it changes - // the field access level using setAccessible(). Calling - // them here put the ClassInfo in cache (they are never evicted) - // before the SecurityManager is installed. - ClassInfo.of(HttpHeaders.class, true); - - ClassInfo.of(JsonWebSignature.Header.class, false); - ClassInfo.of(JsonWebToken.Payload.class, false); - - ClassInfo.of(TokenRequest.class, false); - ClassInfo.of(TokenResponse.class, false); - - ClassInfo.of(GenericJson.class, false); - ClassInfo.of(GenericUrl.class, false); - - Data.nullOf(GoogleJsonError.ErrorInfo.class); - ClassInfo.of(GoogleJsonError.class, false); - - Data.nullOf(Bucket.Cors.class); - ClassInfo.of(Bucket.class, false); - ClassInfo.of(Bucket.Cors.class, false); - ClassInfo.of(Bucket.Lifecycle.class, false); - ClassInfo.of(Bucket.Logging.class, false); - ClassInfo.of(Bucket.Owner.class, false); - ClassInfo.of(Bucket.Versioning.class, false); - ClassInfo.of(Bucket.Website.class, false); - - ClassInfo.of(StorageObject.class, false); - ClassInfo.of(StorageObject.Owner.class, false); - - ClassInfo.of(Objects.class, false); - - ClassInfo.of(Storage.Buckets.Get.class, false); - ClassInfo.of(Storage.Buckets.Insert.class, false); - - ClassInfo.of(Storage.Objects.Get.class, false); - ClassInfo.of(Storage.Objects.Insert.class, false); - ClassInfo.of(Storage.Objects.Delete.class, false); - ClassInfo.of(Storage.Objects.Copy.class, false); - ClassInfo.of(Storage.Objects.List.class, false); - - return null; - }); - } - private final Map clientsSettings; public GoogleCloudStoragePlugin(final Settings settings) { @@ -134,8 +60,10 @@ public List> getSettings() { return Arrays.asList( GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING, GoogleCloudStorageClientSettings.ENDPOINT_SETTING, + GoogleCloudStorageClientSettings.PROJECT_ID_SETTING, GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, - GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING); + GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, + GoogleCloudStorageClientSettings.TOKEN_URI_SETTING); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index e193b8238b8d2..976befae0a269 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -27,7 +26,6 @@ import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.RepositoryException; @@ -39,7 +37,8 @@ import static org.elasticsearch.common.settings.Setting.boolSetting; import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + +import com.google.cloud.storage.Storage; class GoogleCloudStorageRepository extends BlobStoreRepository { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index bccc5e0ffdc5c..57bcc4b131356 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -19,23 +19,26 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpBackOffIOExceptionHandler; -import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.googleapis.GoogleUtils; import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.client.util.ExponentialBackOff; -import com.google.api.services.storage.Storage; +import com.google.api.client.http.javanet.DefaultConnectionFactory; +import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.oauth2.ServiceAccountCredentials; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; + import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { @@ -51,42 +54,107 @@ public GoogleCloudStorageService(final Environment environment, final Map httpTransport) + .build(); + final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() + .setTransportOptions(httpTransportOptions) + .setHeaderProvider(() -> { + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + if (Strings.hasLength(clientSettings.getApplicationName())) { + mapBuilder.put("user-agent", clientSettings.getApplicationName()); + } + return mapBuilder.immutableMap(); + }); + if (Strings.hasLength(clientSettings.getHost())) { + storageOptionsBuilder.setHost(clientSettings.getHost()); } - if (Strings.hasLength(clientSettings.getEndpoint())) { - 
storage.setRootUrl(clientSettings.getEndpoint()); + if (Strings.hasLength(clientSettings.getProjectId())) { + storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } - return storage.build(); + if (clientSettings.getCredential() == null) { + logger.warn("\"Application Default Credentials\" are not supported out of the box." + + " Additional file system permissions have to be granted to the plugin."); + } else { + ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); + // override token server URI + final URI tokenServerUri = clientSettings.getTokenUri(); + if (Strings.hasLength(tokenServerUri.toString())) { + // Rebuild the service account credentials in order to use a custom token URL. + // This is mostly used for testing purposes. + serviceAccountCredentials = serviceAccountCredentials.toBuilder().setTokenServerUri(tokenServerUri).build(); + } + storageOptionsBuilder.setCredentials(serviceAccountCredentials); + } + return storageOptionsBuilder.build().getService(); } - static HttpRequestInitializer createRequestInitializer(final GoogleCloudStorageClientSettings settings) throws IOException { - GoogleCredential credential = settings.getCredential(); - if (credential == null) { - credential = GoogleCredential.getApplicationDefault(); + /** + * Pins the TLS trust certificates and, more importantly, overrides connection + * URLs in the case of a custom endpoint setting because some connections don't + * fully honor this setting (bugs in the SDK). The default connection factory + * opens a new connection for each request. This is required for the storage + * instance to be thread-safe. + **/ + private static HttpTransport createHttpTransport(final String endpoint) throws Exception { + final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); + // requires java.lang.RuntimePermission "setFactory" + builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); + if (Strings.hasLength(endpoint)) { + final URL endpointUrl = URI.create(endpoint).toURL(); + builder.setConnectionFactory(new DefaultConnectionFactory() { + @Override + public HttpURLConnection openConnection(final URL originalUrl) throws IOException { + // test if the URL is built correctly, i.e. following the `host` setting + if (originalUrl.getHost().equals(endpointUrl.getHost()) && originalUrl.getPort() == endpointUrl.getPort() + && originalUrl.getProtocol().equals(endpointUrl.getProtocol())) { + return super.openConnection(originalUrl); + } + // override connection URLs because some don't follow the config. See + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3254 and + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3255 + URI originalUri; + try { + originalUri = originalUrl.toURI(); + } catch (final URISyntaxException e) { + throw new RuntimeException(e); + } + String overridePath = "/"; + if (originalUri.getRawPath() != null) { + overridePath = originalUri.getRawPath(); + } + if (originalUri.getRawQuery() != null) { + overridePath += "?" 
+ originalUri.getRawQuery(); + } + return super.openConnection( + new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), overridePath)); + } + }); } - return new DefaultHttpRequestInitializer(credential, toTimeout(settings.getConnectTimeout()), toTimeout(settings.getReadTimeout())); + return builder.build(); } - /** Converts timeout values from the settings to a timeout value for the Google Cloud SDK **/ + /** + * Converts timeout values from the settings to a timeout value for the Google + * Cloud SDK + **/ static Integer toTimeout(final TimeValue timeout) { // Null or zero in settings means the default timeout if (timeout == null || TimeValue.ZERO.equals(timeout)) { - return null; + // negative value means using the default value + return -1; } // -1 means infinite timeout if (TimeValue.MINUS_ONE.equals(timeout)) { @@ -96,51 +164,4 @@ static Integer toTimeout(final TimeValue timeout) { return Math.toIntExact(timeout.getMillis()); } - /** - * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. - * See https://cloud.google.com/storage/transfer/create-client#retry - */ - static class DefaultHttpRequestInitializer implements HttpRequestInitializer { - - private final Integer connectTimeout; - private final Integer readTimeout; - private final GoogleCredential credential; - - DefaultHttpRequestInitializer(GoogleCredential credential, Integer connectTimeoutMillis, Integer readTimeoutMillis) { - this.credential = credential; - this.connectTimeout = connectTimeoutMillis; - this.readTimeout = readTimeoutMillis; - } - - @Override - public void initialize(HttpRequest request) { - if (connectTimeout != null) { - request.setConnectTimeout(connectTimeout); - } - if (readTimeout != null) { - request.setReadTimeout(readTimeout); - } - - request.setIOExceptionHandler(new HttpBackOffIOExceptionHandler(newBackOff())); - request.setInterceptor(credential); - - final HttpUnsuccessfulResponseHandler handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); - request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { - // Let the credential handle the response. 
If it failed, we rely on our backoff handler - return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); - } - ); - } - - private ExponentialBackOff newBackOff() { - return new ExponentialBackOff.Builder() - .setInitialIntervalMillis(100) - .setMaxIntervalMillis(6000) - .setMaxElapsedTimeMillis(900000) - .setMultiplier(1.5) - .setRandomizationFactor(0.5) - .build(); - } - } - } diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index ce9b0334638a0..fffe6cbbc0f24 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -18,11 +18,12 @@ */ grant { + // required by: com.google.api.client.json.JsonParser#parseValue permission java.lang.RuntimePermission "accessDeclaredMembers"; - permission java.lang.RuntimePermission "setFactory"; + // required by: com.google.api.client.json.GenericJson# permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.net.URLPermission "http://www.googleapis.com/*", "*"; - permission java.net.URLPermission "https://www.googleapis.com/*", "*"; + // required to add google certs to the gcs client truststore + permission java.lang.RuntimePermission "setFactory"; // gcs client opens socket connections to access the repository permission java.net.SocketPermission "*", "connect"; diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java new file mode 100644 index 0000000000000..f2b8a0571ad87 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +import com.google.cloud.storage.spi.v1.StorageRpc; + +import static org.mockito.Mockito.mock; + +/** + * Utility class that exposes Google SDK package-protected methods to + * create specific StorageRpc objects in unit tests. + */ +public class StorageRpcOptionUtils { + + private StorageRpcOptionUtils(){} + + public static String getPrefix(final Storage.BlobListOption... 
options) { + if (options != null) { + for (final Option option : options) { + final StorageRpc.Option rpcOption = option.getRpcOption(); + if (StorageRpc.Option.PREFIX.equals(rpcOption)) { + return (String) option.getValue(); + } + } + } + return null; + } + + public static CopyWriter createCopyWriter(final Blob result) { + return new CopyWriter(mock(StorageOptions.class), mock(StorageRpc.RewriteResponse.class)) { + @Override + public Blob getResult() { + return result; + } + }; + } +} diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java new file mode 100644 index 0000000000000..68175d7f1be53 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +/** + * Utility class that exposes Google SDK package protected methods to + * create bucket and blob objects in unit tests.
+ */ +public class StorageTestUtils { + + private StorageTestUtils(){} + + public static Bucket createBucket(final Storage storage, final String bucketName) { + return new Bucket(storage, (BucketInfo.BuilderImpl) BucketInfo.newBuilder(bucketName)); + } + + public static Blob createBlob(final Storage storage, final String bucketName, final String blobName, final long blobSize) { + return new Blob(storage, (BlobInfo.BuilderImpl) BlobInfo.newBuilder(bucketName, blobName).setSize(blobSize)); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 19551f3b082fa..c4d9b67899672 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; +import com.google.cloud.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index badd86cd8a2b3..14cb4fa242e7d 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -18,20 +18,25 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.security.KeyPair; import java.security.KeyPairGenerator; +import java.util.ArrayList; import java.util.Base64; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; @@ -39,6 +44,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.PROJECT_ID_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential; @@ -46,59 +52,78 @@ public class 
GoogleCloudStorageClientSettingsTests extends ESTestCase { public void testLoadWithEmptySettings() { - Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); + final Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); assertEquals(1, clientsSettings.size()); assertNotNull(clientsSettings.get("default")); } public void testLoad() throws Exception { final int nbClients = randomIntBetween(1, 5); - final Tuple, Settings> randomClients = randomClients(nbClients); + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClients = randomClients(nbClients, deprecationWarnings); final Map expectedClientsSettings = randomClients.v1(); - Map actualClientsSettings = GoogleCloudStorageClientSettings.load(randomClients.v2()); + final Map actualClientsSettings = GoogleCloudStorageClientSettings + .load(randomClients.v2()); assertEquals(expectedClientsSettings.size(), actualClientsSettings.size()); - for (String clientName : expectedClientsSettings.keySet()) { - GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); + for (final String clientName : expectedClientsSettings.keySet()) { + final GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); assertNotNull(actualClientSettings); - GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); + final GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); assertNotNull(expectedClientSettings); - assertGoogleCredential(expectedClientSettings.getCredential(), actualClientSettings.getCredential()); - assertEquals(expectedClientSettings.getEndpoint(), actualClientSettings.getEndpoint()); + assertEquals(expectedClientSettings.getHost(), actualClientSettings.getHost()); + assertEquals(expectedClientSettings.getProjectId(), actualClientSettings.getProjectId()); assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout()); assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); } + + if (deprecationWarnings.isEmpty() == false) { + assertSettingDeprecationsAndWarnings(deprecationWarnings.toArray(new Setting[0])); + } } public void testLoadCredential() throws Exception { - Tuple, Settings> randomClient = randomClients(1); - GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); - String clientName = randomClient.v1().keySet().iterator().next(); - + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClient = randomClients(1, deprecationWarnings); + final GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); + final String clientName = randomClient.v1().keySet().iterator().next(); assertGoogleCredential(expectedClientSettings.getCredential(), loadCredential(randomClient.v2(), clientName)); } + public void testProjectIdDefaultsToCredentials() throws Exception { + final String clientName = randomAlphaOfLength(5); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); + final GoogleCloudStorageClientSettings googleCloudStorageClientSettings = new GoogleCloudStorageClientSettings(credential, + ENDPOINT_SETTING.getDefault(Settings.EMPTY), 
PROJECT_ID_SETTING.getDefault(Settings.EMPTY), + CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY), READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY), + APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY), new URI("")); + assertEquals(credential.getProjectId(), googleCloudStorageClientSettings.getProjectId()); + } + /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ - private Tuple, Settings> randomClients(final int nbClients) throws Exception { + private Tuple, Settings> randomClients(final int nbClients, + final List> deprecationWarnings) + throws Exception { final Map expectedClients = new HashMap<>(); - expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); final Settings.Builder settings = Settings.builder(); final MockSecureSettings secureSettings = new MockSecureSettings(); for (int i = 0; i < nbClients; i++) { - String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); - - GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings); + final String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + final GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings, deprecationWarnings); expectedClients.put(clientName, clientSettings); } if (randomBoolean()) { - GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings); + final GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings, deprecationWarnings); expectedClients.put("default", clientSettings); + } else { + expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); } return Tuple.tuple(expectedClients, settings.setSecureSettings(secureSettings).build()); @@ -107,20 +132,30 @@ private Tuple, Settings> randomCli /** Generates a random GoogleCloudStorageClientSettings along with the Settings to build it **/ private static GoogleCloudStorageClientSettings randomClient(final String clientName, final Settings.Builder settings, - final MockSecureSettings secureSettings) throws Exception { + final MockSecureSettings secureSettings, + final List> deprecationWarnings) throws Exception { - Tuple credentials = randomCredential(clientName); - GoogleCredential credential = credentials.v1(); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); String endpoint; if (randomBoolean()) { - endpoint = randomAlphaOfLength(5); + endpoint = randomFrom("http://www.elastic.co", "http://metadata.google.com:88/oauth", "https://www.googleapis.com", + "https://www.elastic.co:443", "http://localhost:8443", "https://www.googleapis.com/oauth/token"); settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); } else { endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY); } + String projectId; + if (randomBoolean()) { + projectId = randomAlphaOfLength(5); + settings.put(PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectId); + } else { + projectId = PROJECT_ID_SETTING.getDefault(Settings.EMPTY); + } + TimeValue connectTimeout; if (randomBoolean()) { connectTimeout = randomTimeout(); @@ -141,40 +176,35 @@ private static GoogleCloudStorageClientSettings randomClient(final String client if (randomBoolean()) { applicationName = 
randomAlphaOfLength(5); settings.put(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName); + deprecationWarnings.add(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName)); } else { applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } - return new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); + return new GoogleCloudStorageClientSettings(credential, endpoint, projectId, connectTimeout, readTimeout, applicationName, + new URI("")); } /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ - private static Tuple randomCredential(final String clientName) throws Exception { - KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); - - GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder(); - credentialBuilder.setServiceAccountId(clientName); - credentialBuilder.setServiceAccountProjectId("project_id_" + clientName); - credentialBuilder.setServiceAccountScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - credentialBuilder.setServiceAccountPrivateKey(keyPair.getPrivate()); - credentialBuilder.setServiceAccountPrivateKeyId("private_key_id_" + clientName); - - String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); - String serviceAccount = "{\"type\":\"service_account\"," + + private static Tuple randomCredential(final String clientName) throws Exception { + final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); + final ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); + credentialBuilder.setClientId("id_" + clientName); + credentialBuilder.setClientEmail(clientName); + credentialBuilder.setProjectId("project_id_" + clientName); + credentialBuilder.setPrivateKey(keyPair.getPrivate()); + credentialBuilder.setPrivateKeyId("private_key_id_" + clientName); + credentialBuilder.setScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + final String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + final String serviceAccount = "{\"type\":\"service_account\"," + "\"project_id\":\"project_id_" + clientName + "\"," + "\"private_key_id\":\"private_key_id_" + clientName + "\"," + "\"private_key\":\"-----BEGIN PRIVATE KEY-----\\n" + encodedPrivateKey + "\\n-----END PRIVATE KEY-----\\n\"," + "\"client_email\":\"" + clientName + "\"," + - "\"client_id\":\"id_" + clientName + "\"," + - "\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\"," + - "\"token_uri\":\"https://accounts.google.com/o/oauth2/token\"," + - "\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"," + - "\"client_x509_cert_url\":\"https://www.googleapis.com/robot/v1/metadata/x509/" + - clientName + - "%40appspot.gserviceaccount.com\"}"; - + "\"client_id\":\"id_" + clientName + "\"" + + "}"; return Tuple.tuple(credentialBuilder.build(), serviceAccount.getBytes(StandardCharsets.UTF_8)); } @@ -182,14 +212,16 @@ private static TimeValue randomTimeout() { return randomFrom(TimeValue.MINUS_ONE, TimeValue.ZERO, TimeValue.parseTimeValue(randomPositiveTimeValue(), "test")); } - private static void assertGoogleCredential(final GoogleCredential expected, final GoogleCredential actual) { + private static void assertGoogleCredential(ServiceAccountCredentials expected, ServiceAccountCredentials actual) { if 
(expected != null) { assertEquals(expected.getServiceAccountUser(), actual.getServiceAccountUser()); - assertEquals(expected.getServiceAccountId(), actual.getServiceAccountId()); - assertEquals(expected.getServiceAccountProjectId(), actual.getServiceAccountProjectId()); - assertEquals(expected.getServiceAccountScopesAsString(), actual.getServiceAccountScopesAsString()); - assertEquals(expected.getServiceAccountPrivateKey(), actual.getServiceAccountPrivateKey()); - assertEquals(expected.getServiceAccountPrivateKeyId(), actual.getServiceAccountPrivateKeyId()); + assertEquals(expected.getClientId(), actual.getClientId()); + assertEquals(expected.getClientEmail(), actual.getClientEmail()); + assertEquals(expected.getAccount(), actual.getAccount()); + assertEquals(expected.getProjectId(), actual.getProjectId()); + assertEquals(expected.getScopes(), actual.getScopes()); + assertEquals(expected.getPrivateKey(), actual.getPrivateKey()); + assertEquals(expected.getPrivateKeyId(), actual.getPrivateKeyId()); } else { assertNull(actual); } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 44897819fd9e3..a33ae90c549bc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -19,79 +19,65 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpIOExceptionHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestFactory; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponse; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.testing.http.MockHttpTransport; +import com.google.auth.Credentials; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; + +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import java.util.Collections; +import java.util.Locale; -import java.io.IOException; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class GoogleCloudStorageServiceTests extends ESTestCase { - /** - * Test that the {@link GoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances - * of {@link HttpIOExceptionHandler} and {@link HttpUnsuccessfulResponseHandler} for every HTTP requests. 
- */ - public void testDefaultHttpRequestInitializer() throws IOException { + public void testClientInitializer() throws Exception { + final String clientName = randomAlphaOfLength(4).toLowerCase(Locale.ROOT); final Environment environment = mock(Environment.class); - when(environment.settings()).thenReturn(Settings.EMPTY); - - final GoogleCredential credential = mock(GoogleCredential.class); - when(credential.handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean())).thenReturn(false); - - final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final TimeValue connectTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; - final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : null; - - final GoogleCloudStorageClientSettings clientSettings = - new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); - - final HttpRequestInitializer initializer = GoogleCloudStorageService.createRequestInitializer(clientSettings); - final HttpRequestFactory requestFactory = new MockHttpTransport().createRequestFactory(initializer); - - final HttpRequest request1 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); - assertSame(credential, request1.getInterceptor()); - assertNotNull(request1.getIOExceptionHandler()); - assertNotNull(request1.getUnsuccessfulResponseHandler()); - - final HttpRequest request2 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); - assertSame(request1.getInterceptor(), request2.getInterceptor()); - assertNotNull(request2.getIOExceptionHandler()); - assertNotSame(request1.getIOExceptionHandler(), request2.getIOExceptionHandler()); - assertNotNull(request2.getUnsuccessfulResponseHandler()); - assertNotSame(request1.getUnsuccessfulResponseHandler(), request2.getUnsuccessfulResponseHandler()); - - request1.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(1)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); - - request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(2)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); + final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final String applicationName = randomAlphaOfLength(4); + final String hostName = randomFrom("http://", "https://") + randomAlphaOfLength(4) + ":" + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(4); + final Settings settings = Settings.builder() + .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + connectTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + readTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + applicationName) + 
.put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) + .build(); + when(environment.settings()).thenReturn(settings); + final GoogleCloudStorageClientSettings clientSettings = GoogleCloudStorageClientSettings.getClientSettings(settings, clientName); + final GoogleCloudStorageService service = new GoogleCloudStorageService(environment, + Collections.singletonMap(clientName, clientSettings)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.createClient("another_client")); + assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); + assertSettingDeprecationsAndWarnings( + new Setting[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }); + final Storage storage = service.createClient(clientName); + assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); + assertThat(storage.getOptions().getHost(), Matchers.is(hostName)); + assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); + assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), + Matchers.is((int) connectTimeValue.millis())); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(), + Matchers.is((int) readTimeValue.millis())); + assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); } public void testToTimeout() { - assertNull(GoogleCloudStorageService.toTimeout(null)); - assertNull(GoogleCloudStorageService.toTimeout(TimeValue.ZERO)); + assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); + assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 325cea132beb6..2b52b7a32a9cc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,289 +19,478 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.http.AbstractInputStreamContent; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.HttpMethods; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponseException; -import com.google.api.client.http.LowLevelHttpRequest; -import com.google.api.client.http.LowLevelHttpResponse; -import com.google.api.client.http.MultipartContent; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.testing.http.MockHttpTransport; -import com.google.api.client.testing.http.MockLowLevelHttpRequest; -import com.google.api.client.testing.http.MockLowLevelHttpResponse; -import 
com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.StorageObject; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.rest.RestStatus; +import com.google.api.gax.paging.Page; +import com.google.cloud.Policy; +import com.google.cloud.ReadChannel; +import com.google.cloud.RestorableState; +import com.google.cloud.WriteChannel; +import com.google.cloud.storage.Acl; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.BlobId; +import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.Bucket; +import com.google.cloud.storage.BucketInfo; +import com.google.cloud.storage.CopyWriter; +import com.google.cloud.storage.ServiceAccount; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageBatch; +import com.google.cloud.storage.StorageException; +import com.google.cloud.storage.StorageOptions; +import com.google.cloud.storage.StorageRpcOptionUtils; +import com.google.cloud.storage.StorageTestUtils; + +import org.elasticsearch.core.internal.io.IOUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.math.BigInteger; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; import java.util.ArrayList; +import java.util.List; +import java.util.Objects; import java.util.concurrent.ConcurrentMap; - -import static org.mockito.Mockito.mock; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs * in a given concurrent map. */ -class MockStorage extends Storage { - - /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */ - private static final String DELETION_HEADER = "x-blob-to-delete"; +class MockStorage implements Storage { private final String bucketName; private final ConcurrentMap blobs; MockStorage(final String bucket, final ConcurrentMap blobs) { - super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class)); - this.bucketName = bucket; - this.blobs = blobs; + this.bucketName = Objects.requireNonNull(bucket); + this.blobs = Objects.requireNonNull(blobs); } @Override - public Buckets buckets() { - return new MockBuckets(); + public Bucket get(String bucket, BucketGetOption... 
options) { + if (bucketName.equals(bucket)) { + return StorageTestUtils.createBucket(this, bucketName); + } else { + return null; + } } @Override - public Objects objects() { - return new MockObjects(); + public Blob get(BlobId blob) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + if (bytes != null) { + return StorageTestUtils.createBlob(this, bucketName, blob.getName(), bytes.length); + } + } + return null; } - class MockBuckets extends Buckets { + @Override + public boolean delete(BlobId blob) { + if (bucketName.equals(blob.getBucket()) && blobs.containsKey(blob.getName())) { + return blobs.remove(blob.getName()) != null; + } + return false; + } - @Override - public Get get(String getBucket) { - return new Get(getBucket) { - @Override - public Bucket execute() { - if (bucketName.equals(getBucket())) { - Bucket bucket = new Bucket(); - bucket.setId(bucketName); - return bucket; - } else { - return null; - } - } - }; + @Override + public List delete(Iterable blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(delete(blobId)); } + return ans; } - class MockObjects extends Objects { + @Override + public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + if (bucketName.equals(blobInfo.getBucket()) == false) { + throw new StorageException(404, "Bucket not found"); + } + blobs.put(blobInfo.getName(), content); + return get(BlobId.of(blobInfo.getBucket(), blobInfo.getName())); + } + + @Override + public CopyWriter copy(CopyRequest copyRequest) { + if (bucketName.equals(copyRequest.getSource().getBucket()) == false) { + throw new StorageException(404, "Source bucket not found"); + } + if (bucketName.equals(copyRequest.getTarget().getBucket()) == false) { + throw new StorageException(404, "Target bucket not found"); + } + + final byte[] bytes = blobs.get(copyRequest.getSource().getName()); + if (bytes == null) { + throw new StorageException(404, "Source blob does not exist"); + } + blobs.put(copyRequest.getTarget().getName(), bytes); + return StorageRpcOptionUtils + .createCopyWriter(get(BlobId.of(copyRequest.getTarget().getBucket(), copyRequest.getTarget().getName()))); + } + + @Override + public Page list(String bucket, BlobListOption... options) { + if (bucketName.equals(bucket) == false) { + throw new StorageException(404, "Bucket not found"); + } + final Storage storage = this; + final String prefix = StorageRpcOptionUtils.getPrefix(options); - @Override - public Get get(String getBucket, String getObject) { - return new Get(getBucket, getObject) { + return new Page() { + @Override + public boolean hasNextPage() { + return false; + } + + @Override + public String getNextPageToken() { + return null; + } + + @Override + public Page getNextPage() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable iterateAll() { + return blobs.entrySet().stream() + .filter(blob -> ((prefix == null) || blob.getKey().startsWith(prefix))) + .map(blob -> StorageTestUtils.createBlob(storage, bucketName, blob.getKey(), blob.getValue().length)) + .collect(Collectors.toList()); + } + + @Override + public Iterable getValues() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public ReadChannel reader(BlobId blob, BlobSourceOption... 
options) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + final ReadableByteChannel readableByteChannel = Channels.newChannel(new ByteArrayInputStream(bytes)); + return new ReadChannel() { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - - StorageObject storageObject = new StorageObject(); - storageObject.setId(getObject()); - return storageObject; + public void close() { + IOUtils.closeWhileHandlingException(readableByteChannel); } @Override - public InputStream executeMediaAsInputStream() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - return new ByteArrayInputStream(blobs.get(getObject())); + public void seek(long position) throws IOException { + throw new UnsupportedOperationException(); } - }; - } - @Override - public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) { - return new Insert(insertBucket, insertObject) { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Streams.copy(insertStream.getInputStream(), out); - blobs.put(getName(), out.toByteArray()); - return null; + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } + + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } + + @Override + public int read(ByteBuffer dst) throws IOException { + return readableByteChannel.read(dst); } - }; - } - @Override - public List list(String listBucket) { - return new List(listBucket) { @Override - public com.google.api.services.storage.model.Objects execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); - - final java.util.List storageObjects = new ArrayList<>(); - for (Entry blob : blobs.entrySet()) { - if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) { - StorageObject storageObject = new StorageObject(); - storageObject.setId(blob.getKey()); - storageObject.setName(blob.getKey()); - storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); - storageObjects.add(storageObject); - } - } - - objects.setItems(storageObjects); - return objects; + public boolean isOpen() { + return readableByteChannel.isOpen(); } }; } + return null; + } + + @Override + public WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... 
options) { + if (bucketName.equals(blobInfo.getBucket())) { + final ByteArrayOutputStream output = new ByteArrayOutputStream(); + return new WriteChannel() { + + final WritableByteChannel writableByteChannel = Channels.newChannel(output); - @Override - public Delete delete(String deleteBucket, String deleteObject) { - return new Delete(deleteBucket, deleteObject) { @Override - public Void execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } - blobs.remove(getObject()); - return null; + @Override + public int write(ByteBuffer src) throws IOException { + return writableByteChannel.write(src); } @Override - public HttpRequest buildHttpRequest() throws IOException { - HttpRequest httpRequest = super.buildHttpRequest(); - httpRequest.getHeaders().put(DELETION_HEADER, getObject()); - return httpRequest; + public boolean isOpen() { + return writableByteChannel.isOpen(); } - }; - } - @Override - public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) { - return new Copy(srcBucket, srcObject, destBucket, destObject, content) { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getSourceBucket()) == false) { - throw newBucketNotFoundException(getSourceBucket()); - } - if (bucketName.equals(getDestinationBucket()) == false) { - throw newBucketNotFoundException(getDestinationBucket()); - } - - final byte[] bytes = blobs.get(getSourceObject()); - if (bytes == null) { - throw newObjectNotFoundException(getSourceObject()); - } - blobs.put(getDestinationObject(), bytes); - - StorageObject storageObject = new StorageObject(); - storageObject.setId(getDestinationObject()); - return storageObject; + public void close() throws IOException { + IOUtils.closeWhileHandlingException(writableByteChannel); + blobs.put(blobInfo.getName(), output.toByteArray()); } }; } + return null; } - private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + // Everything below this line is not implemented. + + @Override + public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; } - private static GoogleJsonResponseException newObjectNotFoundException(final String object) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + @Override + public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { + return null; } - /** - * {@link MockedHttpTransport} extends the existing testing transport to analyze the content - * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriates - * blobs. We use this because {@link Storage#batch()} is final and there is no other way to - * extend batch requests for testing purposes. 
- */ - static class MockedHttpTransport extends MockHttpTransport { + @Override + public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { + return null; + } - private final ConcurrentMap blobs; + @Override + public Blob get(String bucket, String blob, BlobGetOption... options) { + return null; + } - MockedHttpTransport(final ConcurrentMap blobs) { - this.blobs = blobs; - } + @Override + public Blob get(BlobId blob, BlobGetOption... options) { + return null; + } - @Override - public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { - // We analyze the content of the Batch request to detect our custom HTTP header, - // and extract from it the name of the blob to delete. Then we reply a simple - // batch response so that the client parser is happy. - // - // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the - // format of the batch request body. - if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) { - return new MockLowLevelHttpRequest() { - @Override - public LowLevelHttpResponse execute() throws IOException { - final String contentType = new MultipartContent().getType(); - - final StringBuilder builder = new StringBuilder(); - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - getStreamingContent().writeTo(out); - - Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { - if (line != null && line.startsWith(DELETION_HEADER)) { - builder.append("--__END_OF_PART__\r\n"); - builder.append("Content-Type: application/http").append("\r\n"); - builder.append("\r\n"); - builder.append("HTTP/1.1 "); - - final String blobName = line.substring(line.indexOf(':') + 1).trim(); - if (blobs.containsKey(blobName)) { - builder.append(RestStatus.OK.getStatus()); - blobs.remove(blobName); - } else { - builder.append(RestStatus.NOT_FOUND.getStatus()); - } - builder.append("\r\n"); - builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n"); - builder.append("Content-Length: 0").append("\r\n"); - builder.append("\r\n"); - } - }); - builder.append("\r\n"); - builder.append("--__END_OF_PART__--"); - } - - MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); - response.setStatusCode(200); - response.setContent(builder.toString()); - response.setContentType(contentType); - return response; - } - }; - } else { - return super.buildRequest(method, url); - } - } + @Override + public Page list(BucketListOption... options) { + return null; + } + + @Override + public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; + } + + @Override + public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { + return null; + } + + @Override + public Blob update(BlobInfo blobInfo) { + return null; + } + + @Override + public boolean delete(String bucket, BucketSourceOption... options) { + return false; + } + + @Override + public boolean delete(String bucket, String blob, BlobSourceOption... options) { + return false; + } + + @Override + public boolean delete(BlobId blob, BlobSourceOption... options) { + return false; + } + + @Override + public Blob compose(ComposeRequest composeRequest) { + return null; + } + + @Override + public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... options) { + return new byte[0]; + } + + @Override + public byte[] readAllBytes(BlobId blob, BlobSourceOption... 
options) { + return new byte[0]; + } + + @Override + public StorageBatch batch() { + return null; + } + + @Override + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + return null; + } + + @Override + public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { + return null; + } + + @Override + public List get(BlobId... blobIds) { + return null; + } + + @Override + public List get(Iterable blobIds) { + return null; + } + + @Override + public List update(BlobInfo... blobInfos) { + return null; + } + + @Override + public List update(Iterable blobInfos) { + return null; + } + + @Override + public List delete(BlobId... blobIds) { + return null; + } + + @Override + public Acl getAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return null; + } + + @Override + public Acl getAcl(String bucket, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return false; + } + + @Override + public boolean deleteAcl(String bucket, Acl.Entity entity) { + return false; + } + + @Override + public Acl createAcl(String bucket, Acl acl, BucketSourceOption... options) { + return null; + } + + @Override + public Acl createAcl(String bucket, Acl acl) { + return null; + } + + @Override + public Acl updateAcl(String bucket, Acl acl, BucketSourceOption... options) { + return null; + } + + @Override + public Acl updateAcl(String bucket, Acl acl) { + return null; + } + + @Override + public List listAcls(String bucket, BucketSourceOption... options) { + return null; + } + + @Override + public List listAcls(String bucket) { + return null; + } + + @Override + public Acl getDefaultAcl(String bucket, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteDefaultAcl(String bucket, Acl.Entity entity) { + return false; + } + + @Override + public Acl createDefaultAcl(String bucket, Acl acl) { + return null; + } + + @Override + public Acl updateDefaultAcl(String bucket, Acl acl) { + return null; + } + + @Override + public List listDefaultAcls(String bucket) { + return null; + } + + @Override + public Acl getAcl(BlobId blob, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteAcl(BlobId blob, Acl.Entity entity) { + return false; + } + + @Override + public Acl createAcl(BlobId blob, Acl acl) { + return null; + } + + @Override + public Acl updateAcl(BlobId blob, Acl acl) { + return null; + } + + @Override + public List listAcls(BlobId blob) { + return null; + } + + @Override + public Policy getIamPolicy(String bucket, BucketSourceOption... options) { + return null; + } + + @Override + public Policy setIamPolicy(String bucket, Policy policy, BucketSourceOption... options) { + return null; + } + + @Override + public List testIamPermissions(String bucket, List permissions, BucketSourceOption... options) { + return null; + } + + @Override + public ServiceAccount getServiceAccount(String projectId) { + return null; + } + + @Override + public StorageOptions getOptions() { + return null; } } From ab0be394e958afb1897816630709164864577a75 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 15 May 2018 08:37:50 -0700 Subject: [PATCH 30/74] Remove assert statements from field caps documentation. (#30601) Reorganize the test in `SearchDocumentationIT` so the assertions aren't shown in the generated documentation. 
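
For context on the mechanism involved: the high-level REST client docs pull these Java snippets straight out of the test sources, where the `include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-response]` directive copies everything between the `// tag::field-caps-response` and `// end::field-caps-response` comments into the rendered page. A condensed sketch of the pattern this change applies (taken from the diff below, not the full test):

    // tag::field-caps-response
    Map<String, FieldCapabilities> userResponse = response.getField("user");
    FieldCapabilities textCapabilities = userResponse.get("keyword");
    boolean isSearchable = textCapabilities.isSearchable();
    // end::field-caps-response

    // Assertions now sit outside the tagged region, so the rendered
    // documentation shows only the API usage above.
    assertTrue(isSearchable);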
--- .../documentation/SearchDocumentationIT.java | 27 ++++++++++--------- .../high-level/search/field-caps.asciidoc | 8 +++--- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 6fdc60fcb3394..8a12016025c3e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -98,7 +98,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -725,22 +724,26 @@ public void testFieldCaps() throws Exception { // end::field-caps-execute // tag::field-caps-response - assertThat(response.get().keySet(), contains("user")); - Map userResponse = response.getField("user"); - - assertThat(userResponse.keySet(), containsInAnyOrder("keyword", "text")); // <1> + Map userResponse = response.getField("user"); // <1> FieldCapabilities textCapabilities = userResponse.get("keyword"); - assertTrue(textCapabilities.isSearchable()); - assertFalse(textCapabilities.isAggregatable()); + boolean isSearchable = textCapabilities.isSearchable(); + boolean isAggregatable = textCapabilities.isAggregatable(); - assertArrayEquals(textCapabilities.indices(), // <2> - new String[]{"authors", "contributors"}); - assertNull(textCapabilities.nonSearchableIndices()); // <3> - assertArrayEquals(textCapabilities.nonAggregatableIndices(), // <4> - new String[]{"authors"}); + String[] indices = textCapabilities.indices(); // <2> + String[] nonSearchableIndices = textCapabilities.nonSearchableIndices(); // <3> + String[] nonAggregatableIndices = textCapabilities.nonAggregatableIndices();//<4> // end::field-caps-response + assertThat(userResponse.keySet(), containsInAnyOrder("keyword", "text")); + + assertTrue(isSearchable); + assertFalse(isAggregatable); + + assertArrayEquals(indices, new String[]{"authors", "contributors"}); + assertNull(nonSearchableIndices); + assertArrayEquals(nonAggregatableIndices, new String[]{"authors"}); + // tag::field-caps-execute-listener ActionListener listener = new ActionListener() { @Override diff --git a/docs/java-rest/high-level/search/field-caps.asciidoc b/docs/java-rest/high-level/search/field-caps.asciidoc index fef30f629ca61..1f5b10ad034df 100644 --- a/docs/java-rest/high-level/search/field-caps.asciidoc +++ b/docs/java-rest/high-level/search/field-caps.asciidoc @@ -76,7 +76,7 @@ information about how each index contributes to the field's capabilities. -------------------------------------------------- include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-response] -------------------------------------------------- -<1> The `user` field has two possible types, `keyword` and `text`. -<2> This field only has type `keyword` in the `authors` and `contributors` indices. -<3> Null, since the field is searchable in all indices for which it has the `keyword` type. -<4> The `user` field is not aggregatable in the `authors` index. \ No newline at end of file +<1> A map with entries for the field's possible types, in this case `keyword` and `text`. 
+<2> All indices where the `user` field has type `keyword`. +<3> The subset of these indices where the `user` field isn't searchable, or null if it's always searchable. +<4> Another subset of these indices where the `user` field isn't aggregatable, or null if it's always aggregatable. \ No newline at end of file From 5894e3574f584ca64cd383776564dd29a873c5b2 Mon Sep 17 00:00:00 2001 From: lcawl Date: Tue, 15 May 2018 08:47:48 -0700 Subject: [PATCH 31/74] [DOCS] Restores 7.0.0 release notes and highlights --- docs/reference/index-shared4.asciidoc | 4 ++++ docs/reference/release-notes/highlights.asciidoc | 3 +++ 2 files changed, 7 insertions(+) diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc index f4e87b4e9e8fc..3dc9e4f5e07cf 100644 --- a/docs/reference/index-shared4.asciidoc +++ b/docs/reference/index-shared4.asciidoc @@ -4,3 +4,7 @@ include::how-to.asciidoc[] include::testing.asciidoc[] include::glossary.asciidoc[] + +include::release-notes/highlights.asciidoc[] + +include::release-notes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 1223e9a685a27..0ab4106c22c1f 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,5 +1,8 @@ [[release-highlights]] = {es} Release Highlights +++++ +Release Highlights +++++ [partintro] -- From 21b9170dec8601b5509142a0fb3fa5389614232d Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 May 2018 12:13:24 -0700 Subject: [PATCH 32/74] Security: Remove SecurityLifecycleService (#30526) This commit removes the SecurityLifecycleService, relegating its former functions of listening for cluster state updates to SecurityIndexManager and IndexAuditTrail. 
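
In practice the removal is mostly a re-wiring exercise: components that previously watched cluster state through SecurityLifecycleService now register listeners directly on the SecurityIndexManager. A minimal sketch of the resulting setup, condensed from the Security.java hunks below (constructor arguments as they appear there):

    // Security.createComponents builds one manager for the security index ...
    SecurityIndexManager securityIndex =
        new SecurityIndexManager(settings, client, SecurityIndexManager.SECURITY_INDEX_NAME, clusterService);

    // ... and each store subscribes to its state changes itself, with no
    // intermediate lifecycle service in between.
    securityIndex.addIndexStateListener(nativeRoleMappingStore::onSecurityIndexStateChange);
    securityIndex.addIndexStateListener(allRolesStore::onSecurityIndexStateChange);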
--- .../xpack/security/Security.java | 33 ++--- .../security/SecurityLifecycleService.java | 129 ------------------ .../security/audit/index/IndexAuditTrail.java | 35 ++++- .../security/authc/ExpiredTokenRemover.java | 4 +- .../xpack/security/authc/InternalRealms.java | 6 +- .../xpack/security/authc/TokenService.java | 49 ++++--- .../authc/esnative/NativeUsersStore.java | 38 +++--- .../authc/esnative/ReservedRealm.java | 12 +- .../mapper/NativeRoleMappingStore.java | 31 ++--- .../security/authz/AuthorizationService.java | 6 +- .../security/authz/AuthorizedIndices.java | 4 +- .../authz/store/CompositeRolesStore.java | 3 - .../authz/store/NativeRolesStore.java | 41 +++--- .../support/SecurityIndexManager.java | 23 +++- .../integration/ClearRolesCacheTests.java | 6 +- .../test/SecurityIntegTestCase.java | 2 +- .../xpack/security/SecurityTests.java | 2 +- ...sportSamlInvalidateSessionActionTests.java | 5 +- .../saml/TransportSamlLogoutActionTests.java | 5 +- .../user/TransportGetUsersActionTests.java | 13 +- .../user/TransportPutUserActionTests.java | 5 +- .../authc/AuthenticationServiceTests.java | 8 +- .../security/authc/InternalRealmsTests.java | 8 +- .../security/authc/TokenAuthIntegTests.java | 10 +- .../security/authc/TokenServiceTests.java | 34 ++--- .../esnative/ESNativeMigrateToolTests.java | 7 +- .../authc/esnative/NativeRealmIntegTests.java | 2 +- .../authc/esnative/NativeUsersStoreTests.java | 11 +- .../authc/esnative/ReservedRealmTests.java | 32 ++--- .../mapper/NativeRoleMappingStoreTests.java | 7 +- .../authz/AuthorizationServiceTests.java | 2 +- .../authz/AuthorizedIndicesTests.java | 8 +- .../authz/IndicesAndAliasesResolverTests.java | 14 +- .../authz/store/NativeRolesStoreTests.java | 13 +- .../support/SecurityIndexManagerTests.java | 6 +- .../security/test/SecurityTestUtils.java | 2 +- .../xpack/security/user/XPackUserTests.java | 6 +- 37 files changed, 247 insertions(+), 375 deletions(-) delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 4e4f86baec768..44f5afd4bdb07 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -7,6 +7,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -16,6 +17,7 @@ import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; @@ -232,7 +234,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static 
org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_INDEX_FORMAT; public class Security extends Plugin implements ActionPlugin, IngestPlugin, NetworkPlugin, ClusterPlugin, @@ -261,6 +263,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw private final SetOnce threadContext = new SetOnce<>(); private final SetOnce tokenService = new SetOnce<>(); private final SetOnce securityActionFilter = new SetOnce<>(); + private final SetOnce securityIndex = new SetOnce<>(); + private final SetOnce indexAuditTrail = new SetOnce<>(); private final List bootstrapChecks; private final List securityExtensions = new ArrayList<>(); @@ -368,7 +372,6 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(securityContext.get()); // audit trails construction - IndexAuditTrail indexAuditTrail = null; Set auditTrails = new LinkedHashSet<>(); if (XPackSettings.AUDIT_ENABLED.get(settings)) { List outputs = AUDIT_OUTPUTS_SETTING.get(settings); @@ -383,8 +386,8 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste auditTrails.add(new LoggingAuditTrail(settings, clusterService, threadPool)); break; case IndexAuditTrail.NAME: - indexAuditTrail = new IndexAuditTrail(settings, client, threadPool, clusterService); - auditTrails.add(indexAuditTrail); + indexAuditTrail.set(new IndexAuditTrail(settings, client, threadPool, clusterService)); + auditTrails.add(indexAuditTrail.get()); break; default: throw new IllegalArgumentException("Unknown audit trail output [" + output + "]"); @@ -396,20 +399,20 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(auditTrailService); this.auditTrailService.set(auditTrailService); - final SecurityLifecycleService securityLifecycleService = - new SecurityLifecycleService(settings, clusterService, threadPool, client, indexAuditTrail); - final TokenService tokenService = new TokenService(settings, Clock.systemUTC(), client, securityLifecycleService, clusterService); + securityIndex.set(new SecurityIndexManager(settings, client, SecurityIndexManager.SECURITY_INDEX_NAME, clusterService)); + + final TokenService tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex.get(), clusterService); this.tokenService.set(tokenService); components.add(tokenService); // realms construction - final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client, securityLifecycleService); - final NativeRoleMappingStore nativeRoleMappingStore = new NativeRoleMappingStore(settings, client, securityLifecycleService); + final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client, securityIndex.get()); + final NativeRoleMappingStore nativeRoleMappingStore = new NativeRoleMappingStore(settings, client, securityIndex.get()); final AnonymousUser anonymousUser = new AnonymousUser(settings); final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore, - anonymousUser, securityLifecycleService, threadPool.getThreadContext()); + anonymousUser, securityIndex.get(), threadPool.getThreadContext()); Map realmFactories = new HashMap<>(InternalRealms.getFactories(threadPool, resourceWatcherService, - getSslService(), nativeUsersStore, nativeRoleMappingStore, securityLifecycleService)); + getSslService(), nativeUsersStore, nativeRoleMappingStore, securityIndex.get())); for (SecurityExtension extension : 
securityExtensions) { Map newRealms = extension.getRealms(resourceWatcherService); for (Map.Entry entry : newRealms.entrySet()) { @@ -424,7 +427,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(realms); components.add(reservedRealm); - securityLifecycleService.securityIndex().addIndexStateListener(nativeRoleMappingStore::onSecurityIndexStateChange); + securityIndex.get().addIndexStateListener(nativeRoleMappingStore::onSecurityIndexStateChange); AuthenticationFailureHandler failureHandler = null; String extensionName = null; @@ -449,7 +452,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(authcService.get()); final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState()); - final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityLifecycleService); + final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get()); final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(); List, ActionListener>>> rolesProviders = new ArrayList<>(); for (SecurityExtension extension : securityExtensions) { @@ -457,7 +460,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste } final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, rolesProviders, threadPool.getThreadContext(), getLicenseState()); - securityLifecycleService.securityIndex().addIndexStateListener(allRolesStore::onSecurityIndexStateChange); + securityIndex.get().addIndexStateListener(allRolesStore::onSecurityIndexStateChange); // to keep things simple, just invalidate all cached entries on license change. this happens so rarely that the impact should be // minimal getLicenseState().addListener(allRolesStore::invalidateAll); @@ -468,8 +471,6 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(allRolesStore); // for SecurityFeatureSet and clear roles cache components.add(authzService); - components.add(securityLifecycleService); - ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState())); components.add(ipFilter.get()); DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java deleted file mode 100644 index 9d05ec3b71d6b..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; -import org.elasticsearch.xpack.security.support.SecurityIndexManager; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.function.Predicate; - -/** - * This class is used to provide a lifecycle for services that is based on the cluster's state - * rather than the typical lifecycle that is used to start services as part of the node startup. - * - * This type of lifecycle is necessary for services that need to perform actions that require the - * cluster to be in a certain state; some examples are storing index templates and creating indices. - * These actions would most likely fail from within a plugin if executed in the - * {@link org.elasticsearch.common.component.AbstractLifecycleComponent#doStart()} method. - * However, if the startup of these services waits for the cluster to form and recover indices then - * it will be successful. This lifecycle service allows for this to happen by listening for - * {@link ClusterChangedEvent} and checking if the services can start. Additionally, the service - * also provides hooks for stop and close functionality. 
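(A minimal sketch of the cluster-state-driven lifecycle this javadoc describes, using only types that already appear in this patch: ClusterStateListener, LifecycleListener, AbstractRunnable and GatewayService. The class name and the startup body are hypothetical; the real components below wire these same steps into their own constructors.)

    import java.util.concurrent.atomic.AtomicBoolean;

    import org.elasticsearch.cluster.ClusterChangedEvent;
    import org.elasticsearch.cluster.ClusterStateListener;
    import org.elasticsearch.cluster.service.ClusterService;
    import org.elasticsearch.common.component.LifecycleListener;
    import org.elasticsearch.common.util.concurrent.AbstractRunnable;
    import org.elasticsearch.gateway.GatewayService;
    import org.elasticsearch.threadpool.ThreadPool;

    // Hypothetical component whose startup is gated on the recovered cluster state.
    class ClusterStateGatedService implements ClusterStateListener {

        private final ThreadPool threadPool;
        private final AtomicBoolean started = new AtomicBoolean(false);

        ClusterStateGatedService(ClusterService clusterService, ThreadPool threadPool) {
            this.threadPool = threadPool;
            clusterService.addListener(this);           // startup is driven by cluster state updates
            clusterService.addLifecycleListener(new LifecycleListener() {
                @Override
                public void beforeStop() {
                    started.set(false);                 // the stop hook, mirroring close() above
                }
            });
        }

        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
                return;  // indices may not have been restored from disk yet; try again next update
            }
            if (started.compareAndSet(false, true)) {   // guard against concurrent double starts
                // never do heavy work on the cluster state applier thread
                threadPool.generic().execute(new AbstractRunnable() {
                    @Override
                    public void onFailure(Exception e) {
                        started.set(false);             // leave room for a retry on a later update
                    }

                    @Override
                    public void doRun() {
                        // the actual startup work (templates, index creation, ...) goes here
                    }
                });
            }
        }
    }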
- */ -public class SecurityLifecycleService extends AbstractComponent implements ClusterStateListener { - - public static final String INTERNAL_SECURITY_INDEX = SecurityIndexManager.INTERNAL_SECURITY_INDEX; - public static final String SECURITY_INDEX_NAME = ".security"; - - private final Settings settings; - private final ThreadPool threadPool; - private final IndexAuditTrail indexAuditTrail; - - private final SecurityIndexManager securityIndex; - - public SecurityLifecycleService(Settings settings, ClusterService clusterService, - ThreadPool threadPool, Client client, - @Nullable IndexAuditTrail indexAuditTrail) { - super(settings); - this.settings = settings; - this.threadPool = threadPool; - this.indexAuditTrail = indexAuditTrail; - this.securityIndex = new SecurityIndexManager(settings, client, SECURITY_INDEX_NAME); - clusterService.addListener(this); - clusterService.addLifecycleListener(new LifecycleListener() { - @Override - public void beforeStop() { - close(); - } - }); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - final ClusterState state = event.state(); - if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { - // wait until the gateway has recovered from disk, otherwise we think we don't have the - // .security index but they may not have been restored from the cluster state on disk - logger.debug("lifecycle service waiting until state has been recovered"); - return; - } - - securityIndex.clusterChanged(event); - - try { - if (Security.indexAuditLoggingEnabled(settings) && - indexAuditTrail.state() == IndexAuditTrail.State.INITIALIZED) { - if (indexAuditTrail.canStart(event)) { - threadPool.generic().execute(new AbstractRunnable() { - - @Override - public void onFailure(Exception throwable) { - logger.error("failed to start index audit trail services", throwable); - assert false : "security lifecycle services startup failed"; - } - - @Override - public void doRun() { - indexAuditTrail.start(); - } - }); - } - } - } catch (Exception e) { - logger.error("failed to start index audit trail", e); - } - } - - public SecurityIndexManager securityIndex() { - return securityIndex; - } - - // this is called in a lifecycle listener beforeStop on the cluster service - private void close() { - if (indexAuditTrail != null) { - try { - indexAuditTrail.stop(); - } catch (Exception e) { - logger.error("failed to stop audit trail module", e); - } - } - } - - public static List indexNames() { - return Collections.unmodifiableList(Arrays.asList(SECURITY_INDEX_NAME, INTERNAL_SECURITY_INDEX)); - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java index 590c2bc5ecd4e..db7475a89727f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java @@ -20,6 +20,7 @@ import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -29,12 +30,14 @@ import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -110,7 +113,7 @@ /** * Audit trail implementation that writes events into an index. */ -public class IndexAuditTrail extends AbstractComponent implements AuditTrail { +public class IndexAuditTrail extends AbstractComponent implements AuditTrail, ClusterStateListener { public static final String NAME = "index"; public static final String DOC_TYPE = "doc"; @@ -199,6 +202,13 @@ public IndexAuditTrail(Settings settings, Client client, ThreadPool threadPool, } else { this.client = initializeRemoteClient(settings, logger); } + clusterService.addListener(this); + clusterService.addLifecycleListener(new LifecycleListener() { + @Override + public void beforeStop() { + stop(); + } + }); } @@ -206,6 +216,29 @@ public State state() { return state.get(); } + @Override + public void clusterChanged(ClusterChangedEvent event) { + try { + if (state() == IndexAuditTrail.State.INITIALIZED && canStart(event)) { + threadPool.generic().execute(new AbstractRunnable() { + + @Override + public void onFailure(Exception throwable) { + logger.error("failed to start index audit trail services", throwable); + assert false : "security lifecycle services startup failed"; + } + + @Override + public void doRun() { + start(); + } + }); + } + } catch (Exception e) { + logger.error("failed to start index audit trail", e); + } + } + /** * This method determines if this service can be started based on the state in the {@link ClusterChangedEvent} and * if the node is the master or not. 
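(An aside on the plugin wiring earlier in this patch: Security now parks the IndexAuditTrail and SecurityIndexManager in org.apache.lucene.util.SetOnce holders, so the plugin's final fields can be assigned exactly once from createComponents. A small, self-contained sketch of that Lucene utility; the demo class and values are hypothetical.)

    import org.apache.lucene.util.SetOnce;

    public class SetOnceDemo {
        public static void main(String[] args) {
            SetOnce<String> holder = new SetOnce<>();
            System.out.println(holder.get());   // null: nothing published yet
            holder.set("first");                // the one permitted assignment
            System.out.println(holder.get());   // prints "first"
            try {
                holder.set("second");           // any later assignment fails fast
            } catch (SetOnce.AlreadySetException e) {
                System.out.println("already set");
            }
        }
    }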
When using remote indexing, a call to the remote cluster will be made to retrieve diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java index a46d6131c6035..b8ae5c944419a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.time.Instant; import java.time.temporal.ChronoUnit; @@ -50,7 +50,7 @@ final class ExpiredTokenRemover extends AbstractRunnable { @Override public void doRun() { - SearchRequest searchRequest = new SearchRequest(SecurityLifecycleService.SECURITY_INDEX_NAME); + SearchRequest searchRequest = new SearchRequest(SecurityIndexManager.SECURITY_INDEX_NAME); DeleteByQueryRequest expiredDbq = new DeleteByQueryRequest(searchRequest); if (timeout != TimeValue.MINUS_ONE) { expiredDbq.setTimeout(timeout); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java index b50264a73e949..1e38e6fd10391 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; @@ -30,6 +29,7 @@ import org.elasticsearch.xpack.security.authc.saml.SamlRealm; import org.elasticsearch.xpack.security.authc.support.RoleMappingFileBootstrapCheck; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; import java.util.Arrays; @@ -90,13 +90,13 @@ static boolean isStandardRealm(String type) { public static Map getFactories(ThreadPool threadPool, ResourceWatcherService resourceWatcherService, SSLService sslService, NativeUsersStore nativeUsersStore, NativeRoleMappingStore nativeRoleMappingStore, - SecurityLifecycleService securityLifecycleService) { + SecurityIndexManager securityIndex) { Map map = new HashMap<>(); map.put(FileRealmSettings.TYPE, config -> new FileRealm(config, resourceWatcherService)); map.put(NativeRealmSettings.TYPE, config -> { final NativeRealm nativeRealm = new NativeRealm(config, nativeUsersStore); - securityLifecycleService.securityIndex().addIndexStateListener(nativeRealm::onSecurityIndexStateChange); + securityIndex.addIndexStateListener(nativeRealm::onSecurityIndexStateChange); return nativeRealm; }); map.put(LdapRealmSettings.AD_TYPE, 
config -> new LdapRealm(LdapRealmSettings.AD_TYPE, config, sslService, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 28098faa50ea6..d23415f87dfcc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.core.internal.io.IOUtils; -import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ExceptionsHelper; @@ -70,7 +69,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import javax.crypto.Cipher; import javax.crypto.CipherInputStream; @@ -161,7 +160,7 @@ public final class TokenService extends AbstractComponent { private final TimeValue expirationDelay; private final TimeValue deleteInterval; private final Client client; - private final SecurityLifecycleService lifecycleService; + private final SecurityIndexManager securityIndex; private final ExpiredTokenRemover expiredTokenRemover; private final boolean enabled; private volatile TokenKeys keyCache; @@ -176,7 +175,7 @@ public final class TokenService extends AbstractComponent { * @param client the client to use when checking for revocations */ public TokenService(Settings settings, Clock clock, Client client, - SecurityLifecycleService lifecycleService, ClusterService clusterService) throws GeneralSecurityException { + SecurityIndexManager securityIndex, ClusterService clusterService) throws GeneralSecurityException { super(settings); byte[] saltArr = new byte[SALT_BYTES]; secureRandom.nextBytes(saltArr); @@ -185,7 +184,7 @@ public TokenService(Settings settings, Clock clock, Client client, this.clock = clock.withZone(ZoneOffset.UTC); this.expirationDelay = TOKEN_EXPIRATION.get(settings); this.client = client; - this.lifecycleService = lifecycleService; + this.securityIndex = securityIndex; this.lastExpirationRunMs = client.threadPool().relativeTimeInMillis(); this.deleteInterval = DELETE_INTERVAL.get(settings); this.enabled = isTokenServiceEnabled(settings); @@ -245,12 +244,12 @@ public void createUserToken(Authentication authentication, Authentication origin .endObject(); builder.endObject(); IndexRequest request = - client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) + client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) .setOpType(OpType.CREATE) .setSource(builder) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .request(); - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request, ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)), listener::onFailure)) @@ -354,9 +353,9 @@ void 
decodeToken(String token, ActionListener listener) throws IOExce if (version.onOrAfter(Version.V_6_2_0)) { // we only have the id and need to get the token from the doc! decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final GetRequest getRequest = - client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, + client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(tokenId)).request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, ActionListener.wrap(response -> { @@ -517,14 +516,14 @@ private void indexBwcInvalidation(UserToken userToken, ActionListener l listener.onFailure(invalidGrantException("failed to invalidate token")); } else { final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken); - IndexRequest indexRequest = client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) + IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) .setOpType(OpType.CREATE) .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .request(); final String tokenDocId = getTokenDocumentId(userToken); final Version version = userToken.getVersion(); - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, ActionListener.wrap(indexResponse -> { ActionListener wrappedListener = @@ -561,12 +560,12 @@ private void indexInvalidation(String tokenDocId, Version version, ActionListene if (attemptCount.get() > 5) { listener.onFailure(invalidGrantException("failed to invalidate token")); } else { - UpdateRequest request = client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, tokenDocId) + UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) .setDoc(srcPrefix, Collections.singletonMap("invalidated", true)) .setVersion(documentVersion) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .request(); - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, ActionListener.wrap(updateResponse -> { if (updateResponse.getGetResult() != null @@ -593,7 +592,7 @@ private void indexInvalidation(String tokenDocId, Version version, ActionListene || isShardNotAvailableException(cause)) { attemptCount.incrementAndGet(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), + client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), ActionListener.wrap(getResult -> { if (getResult.isExists()) { Map source = getResult.getSource(); @@ -658,14 +657,14 @@ private void findTokenFromRefreshToken(String refreshToken, ActionListener 5) { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { - SearchRequest request = 
client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setQuery(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("doc_type", "token")) .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) .setVersion(true) .request(); - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, ActionListener.wrap(searchResponse -> { if (searchResponse.isTimedOut()) { @@ -702,7 +701,7 @@ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionList if (attemptCount.getAndIncrement() > 5) { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { - GetRequest getRequest = client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(); + GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, ActionListener.wrap(response -> { if (response.isExists()) { @@ -723,7 +722,7 @@ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionList in.setVersion(authVersion); Authentication authentication = new Authentication(in); UpdateRequest updateRequest = - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, tokenDocId) + client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) .setVersion(response.getVersion()) .setDoc("refresh_token", Collections.singletonMap("refreshed", true)) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -838,7 +837,7 @@ public void findActiveTokensForRealm(String realmName, ActionListener supplier = client.threadPool().getThreadContext().newRestorableContext(false); - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit)); } @@ -914,14 +913,14 @@ private void ensureEnabled() { * have been explicitly cleared. */ private void checkIfTokenIsRevoked(UserToken userToken, ActionListener listener) { - if (lifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // index doesn't exist so the token is considered valid. 
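                // (with no .security index there can be no invalidation documents either,
                // so answering with the unmodified token is safe, not merely optimistic)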
listener.onResponse(userToken); } else { - lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { MultiGetRequest mGetRequest = client.prepareMultiGet() - .add(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) - .add(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) + .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) + .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) .request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, mGetRequest, @@ -989,7 +988,7 @@ private Instant getExpirationTime(Instant now) { } private void maybeStartTokenRemover() { - if (lifecycleService.securityIndex().isAvailable()) { + if (securityIndex.isAvailable()) { if (client.threadPool().relativeTimeInMillis() - lastExpirationRunMs > deleteInterval.getMillis()) { expiredTokenRemover.submit(client.threadPool()); lastExpirationRunMs = client.threadPool().relativeTimeInMillis(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 381053d9633d7..1477c6dc88045 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -50,7 +50,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.User.Fields; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Arrays; import java.util.Collection; @@ -64,7 +64,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; /** * NativeUsersStore is a store for users that reads from an Elasticsearch index. This store is responsible for fetching the full @@ -83,12 +83,12 @@ public class NativeUsersStore extends AbstractComponent { private final Hasher hasher = Hasher.BCRYPT; private final Client client; - private volatile SecurityLifecycleService securityLifecycleService; + private final SecurityIndexManager securityIndex; - public NativeUsersStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { + public NativeUsersStore(Settings settings, Client client, SecurityIndexManager securityIndex) { super(settings); this.client = client; - this.securityLifecycleService = securityLifecycleService; + this.securityIndex = securityIndex; } /** @@ -114,7 +114,7 @@ public void getUsers(String[] userNames, final ActionListener> } }; - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // TODO remove this short circuiting and fix tests that fail without this! 
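                // (an absent security index is treated as "no native users exist", so an
                // empty result is the correct answer here rather than an error)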
listener.onResponse(Collections.emptyList()); } else if (userNames.length == 1) { // optimization for single user lookup @@ -123,7 +123,7 @@ public void getUsers(String[] userNames, final ActionListener> (uap) -> listener.onResponse(uap == null ? Collections.emptyList() : Collections.singletonList(uap.user())), handleException)); } else { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final QueryBuilder query; if (userNames == null || userNames.length == 0) { query = QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE); @@ -154,11 +154,11 @@ public void getUsers(String[] userNames, final ActionListener> * Async method to retrieve a user and their password */ private void getUserAndPassword(final String user, final ActionListener listener) { - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // TODO remove this short circuiting and fix tests that fail without this! listener.onResponse(null); } else { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(), @@ -199,7 +199,7 @@ public void changePassword(final ChangePasswordRequest request, final ActionList docType = USER_DOC_TYPE; } - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareUpdate(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(docType, username)) .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.PASSWORD.getPreferredName(), @@ -237,7 +237,7 @@ public void onFailure(Exception e) { * has been indexed */ private void createReservedUser(String username, char[] passwordHash, RefreshPolicy refresh, ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareIndex(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) @@ -279,7 +279,7 @@ public void putUser(final PutUserRequest request, final ActionListener private void updateUserWithoutPassword(final PutUserRequest putUserRequest, final ActionListener listener) { assert putUserRequest.passwordHash() == null; // We must have an existing document - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareUpdate(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, putUserRequest.username())) @@ -322,7 +322,7 @@ public void onFailure(Exception e) { private void indexUser(final PutUserRequest putUserRequest, final ActionListener listener) { assert putUserRequest.passwordHash() != null; - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + 
securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareIndex(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, putUserRequest.username())) @@ -366,7 +366,7 @@ public void setEnabled(final String username, final boolean enabled, final Refre private void setRegularUserEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, final ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareUpdate(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, username)) @@ -401,7 +401,7 @@ public void onFailure(Exception e) { private void setReservedUserEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, boolean clearCache, final ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareUpdate(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) @@ -431,7 +431,7 @@ public void onFailure(Exception e) { } public void deleteUser(final DeleteUserRequest deleteUserRequest, final ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, deleteUserRequest.username())).request(); request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy()); @@ -470,11 +470,11 @@ void verifyPassword(String username, final SecureString password, ActionListener } void getReservedUserInfo(String username, ActionListener listener) { - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // TODO remove this short circuiting and fix tests that fail without this! 
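                // (a null reply lets the caller, e.g. the ReservedRealm below, fall back to
                // the built-in defaults for reserved users instead of failing the lookup)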
listener.onResponse(null); } else { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)).request(), @@ -514,7 +514,7 @@ public void onFailure(Exception e) { } void getAllReservedUserInfo(ActionListener> listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE)) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 199a1c1968408..7dbcea908722c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -30,9 +30,9 @@ import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; import java.util.Arrays; @@ -63,16 +63,16 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { private final AnonymousUser anonymousUser; private final boolean realmEnabled; private final boolean anonymousEnabled; - private final SecurityLifecycleService securityLifecycleService; + private final SecurityIndexManager securityIndex; public ReservedRealm(Environment env, Settings settings, NativeUsersStore nativeUsersStore, AnonymousUser anonymousUser, - SecurityLifecycleService securityLifecycleService, ThreadContext threadContext) { + SecurityIndexManager securityIndex, ThreadContext threadContext) { super(TYPE, new RealmConfig(TYPE, Settings.EMPTY, settings, env, threadContext)); this.nativeUsersStore = nativeUsersStore; this.realmEnabled = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); this.anonymousUser = anonymousUser; this.anonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); - this.securityLifecycleService = securityLifecycleService; + this.securityIndex = securityIndex; final char[] hash = BOOTSTRAP_ELASTIC_PASSWORD.get(settings).length() == 0 ? 
EMPTY_PASSWORD_HASH : Hasher.BCRYPT.hash(BOOTSTRAP_ELASTIC_PASSWORD.get(settings)); bootstrapUserInfo = new ReservedUserInfo(hash, true, hash == EMPTY_PASSWORD_HASH); @@ -191,7 +191,7 @@ private void getUserInfo(final String username, ActionListener if (userIsDefinedForCurrentSecurityMapping(username) == false) { logger.debug("Marking user [{}] as disabled because the security mapping is not at the required version", username); listener.onResponse(DISABLED_DEFAULT_USER_INFO.deepClone()); - } else if (securityLifecycleService.securityIndex().indexExists() == false) { + } else if (securityIndex.indexExists() == false) { listener.onResponse(getDefaultUserInfo(username)); } else { nativeUsersStore.getReservedUserInfo(username, ActionListener.wrap((userInfo) -> { @@ -218,7 +218,7 @@ private ReservedUserInfo getDefaultUserInfo(String username) { private boolean userIsDefinedForCurrentSecurityMapping(String username) { final Version requiredVersion = getDefinedVersion(username); - return securityLifecycleService.securityIndex().checkMappingVersion(requiredVersion::onOrBefore); + return securityIndex.checkMappingVersion(requiredVersion::onOrBefore); } private Version getDefinedVersion(String username) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index b6df03471242e..7df4114863de2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -12,8 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; @@ -36,7 +34,6 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -62,13 +59,13 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; /** * This store reads + writes {@link ExpressionRoleMapping role mappings} in an Elasticsearch - * {@link 
SecurityLifecycleService#SECURITY_INDEX_NAME index}. + * {@link SecurityIndexManager#SECURITY_INDEX_NAME index}. *
* The store is responsible for all read and write operations as well as * {@link #resolveRoles(UserData, ActionListener) resolving roles}. @@ -99,13 +96,13 @@ public void onFailure(Exception e) { }; private final Client client; - private final SecurityLifecycleService securityLifecycleService; + private final SecurityIndexManager securityIndex; private final List realmsToRefresh = new CopyOnWriteArrayList<>(); - public NativeRoleMappingStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { + public NativeRoleMappingStore(Settings settings, Client client, SecurityIndexManager securityIndex) { super(settings); this.client = client; - this.securityLifecycleService = securityLifecycleService; + this.securityIndex = securityIndex; } private String getNameFromId(String id) { @@ -122,7 +119,7 @@ private String getIdForName(String name) { * package private for unit testing */ void loadMappings(ActionListener> listener) { - if (securityLifecycleService.securityIndex().isIndexUpToDate() == false) { + if (securityIndex.isIndexUpToDate() == false) { listener.onFailure(new IllegalStateException( "Security index is not on the current version - the native realm will not be operational until " + "the upgrade API is run on the security index")); @@ -178,7 +175,7 @@ public void deleteRoleMapping(DeleteRoleMappingRequest request, ActionListener void modifyMapping(String name, CheckedBiConsumer, Exception> inner, Request request, ActionListener listener) { - if (securityLifecycleService.securityIndex().isIndexUpToDate() == false) { + if (securityIndex.isIndexUpToDate() == false) { listener.onFailure(new IllegalStateException( "Security index is not on the current version - the native realm will not be operational until " + "the upgrade API is run on the security index")); @@ -194,7 +191,7 @@ private void modifyMapping(String name, CheckedBiConsumer listener) { final ExpressionRoleMapping mapping = request.getMapping(); - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final XContentBuilder xContentBuilder; try { xContentBuilder = mapping.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true); @@ -224,7 +221,7 @@ public void onFailure(Exception e) { } private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener listener) throws IOException { - if (securityLifecycleService.securityIndex().isIndexUpToDate() == false) { + if (securityIndex.isIndexUpToDate() == false) { listener.onFailure(new IllegalStateException( "Security index is not on the current version - the native realm will not be operational until " + "the upgrade API is run on the security index")); @@ -278,16 +275,16 @@ public void onFailure(Exception e) { } private void getMappings(ActionListener> listener) { - if (securityLifecycleService.securityIndex().isAvailable()) { + if (securityIndex.isAvailable()) { loadMappings(listener); } else { logger.info("The security index is not yet available - no role mappings can be loaded"); if (logger.isDebugEnabled()) { logger.debug("Security Index [{}] [exists: {}] [available: {}] [mapping up to date: {}]", SECURITY_INDEX_NAME, - securityLifecycleService.securityIndex().indexExists(), - securityLifecycleService.securityIndex().isAvailable(), - securityLifecycleService.securityIndex().isMappingUpToDate() + securityIndex.indexExists(), + securityIndex.isAvailable(), + securityIndex.isMappingUpToDate() ); } 
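            // (reads like this one degrade to an empty answer while the index is unavailable;
            // writes instead go through prepareIndexIfNeededThenExecute above, which creates or
            // updates the index first and only then runs the supplied operation)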
listener.onResponse(Collections.emptyList()); @@ -304,7 +301,7 @@ private void getMappings(ActionListener> listener) { * */ public void usageStats(ActionListener> listener) { - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { reportStats(listener, Collections.emptyList()); } else { getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 8ab48a0320602..19760ccab0202 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -60,11 +60,11 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.ResolvedIndices; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Arrays; import java.util.Collections; @@ -301,7 +301,7 @@ && isSuperuser(authentication.getUser()) == false) { // only the XPackUser is allowed to work with this index, but we should allow indices monitoring actions through for debugging // purposes. 
These monitor requests also sometimes resolve indices concretely and then requests them logger.debug("user [{}] attempted to directly perform [{}] against the security index [{}]", - authentication.getUser().principal(), action, SecurityLifecycleService.SECURITY_INDEX_NAME); + authentication.getUser().principal(), action, SecurityIndexManager.SECURITY_INDEX_NAME); throw denial(authentication, action, request, permission.names()); } else { putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, indicesAccessControl); @@ -337,7 +337,7 @@ && isSuperuser(authentication.getUser()) == false) { } private boolean hasSecurityIndexAccess(IndicesAccessControl indicesAccessControl) { - for (String index : SecurityLifecycleService.indexNames()) { + for (String index : SecurityIndexManager.indexNames()) { final IndicesAccessControl.IndexAccessControl indexPermissions = indicesAccessControl.getIndexPermissions(index); if (indexPermissions != null && indexPermissions.isGranted()) { return true; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java index 3f257b7f0ce91..3068a3993d309 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java @@ -9,7 +9,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; import java.util.Collections; @@ -58,7 +58,7 @@ private List load() { if (isSuperuser(user) == false) { // we should filter out all of the security indices from wildcards - indicesAndAliases.removeAll(SecurityLifecycleService.indexNames()); + indicesAndAliases.removeAll(SecurityIndexManager.indexNames()); } return Collections.unmodifiableList(indicesAndAliases); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index d90f50ca5faa5..b5a20af8d30b9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -7,8 +7,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -34,7 +32,6 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 834a70b9e0304..b1e5170a2027c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -43,7 +43,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; import java.util.ArrayList; @@ -85,22 +85,21 @@ public class NativeRolesStore extends AbstractComponent { private final XPackLicenseState licenseState; private SecurityClient securityClient; - private final SecurityLifecycleService securityLifecycleService; + private final SecurityIndexManager securityIndex; - public NativeRolesStore(Settings settings, Client client, XPackLicenseState licenseState, - SecurityLifecycleService securityLifecycleService) { + public NativeRolesStore(Settings settings, Client client, XPackLicenseState licenseState, SecurityIndexManager securityIndex) { super(settings); this.client = client; this.securityClient = new SecurityClient(client); this.licenseState = licenseState; - this.securityLifecycleService = securityLifecycleService; + this.securityIndex = securityIndex; } /** * Retrieve a list of roles, if rolesToGet is null or empty, fetch all roles */ public void getRoleDescriptors(String[] names, final ActionListener> listener) { - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // TODO remove this short circuiting and fix tests that fail without this! 
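                // (same convention as NativeUsersStore: an absent security index means no
                // native roles can exist, so reply with an empty list instead of an error)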
listener.onResponse(Collections.emptyList()); } else if (names != null && names.length == 1) { @@ -108,7 +107,7 @@ public void getRoleDescriptors(String[] names, final ActionListener { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { QueryBuilder query; if (names == null || names.length == 0) { query = QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE); @@ -118,7 +117,7 @@ public void getRoleDescriptors(String[] names, final ActionListener supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { - SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setScroll(TimeValue.timeValueSeconds(10L)) .setQuery(query) .setSize(1000) @@ -133,8 +132,8 @@ public void getRoleDescriptors(String[] names, final ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - DeleteRequest request = client.prepareDelete(SecurityLifecycleService.SECURITY_INDEX_NAME, + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + DeleteRequest request = client.prepareDelete(SecurityIndexManager.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(deleteRoleRequest.name())).request(); request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, @@ -166,7 +165,7 @@ public void putRole(final PutRoleRequest request, final RoleDescriptor role, fin // pkg-private for testing void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final XContentBuilder xContentBuilder; try { xContentBuilder = role.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true); @@ -175,7 +174,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final return; } executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) + client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) .setSource(xContentBuilder) .setRefreshPolicy(request.getRefreshPolicy()) .request(), @@ -197,19 +196,19 @@ public void onFailure(Exception e) { public void usageStats(ActionListener> listener) { Map usageStats = new HashMap<>(); - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { usageStats.put("size", 0L); usageStats.put("fls", false); usageStats.put("dls", false); listener.onResponse(usageStats); } else { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareMultiSearch() - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) 
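                    // sub-search 1 of 3 counts all native roles; the next two probe for
                    // field- and document-level security usage behind the "fls"/"dls" stats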
.setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .setSize(0)) - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .must(QueryBuilders.boolQuery() @@ -219,7 +218,7 @@ public void usageStats(ActionListener> listener) { .should(existsQuery("indices.fields")))) .setSize(0) .setTerminateAfter(1)) - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .filter(existsQuery("indices.query"))) @@ -259,11 +258,11 @@ public void onFailure(Exception e) { } private void getRoleDescriptor(final String roleId, ActionListener roleActionListener) { - if (securityLifecycleService.securityIndex().indexExists() == false) { + if (securityIndex.indexExists() == false) { // TODO remove this short circuiting and fix tests that fail without this! roleActionListener.onResponse(null); } else { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(roleActionListener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(roleActionListener::onFailure, () -> executeGetRoleRequest(roleId, new ActionListener() { @Override public void onResponse(GetResponse response) { @@ -288,9 +287,9 @@ public void onFailure(Exception e) { } private void executeGetRoleRequest(String role, ActionListener listener) { - securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, + client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role)).request(), listener, client::get)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 4bcfb779b0d50..45c55c633d923 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.AliasOrIndex; @@ -30,15 +31,19 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.gateway.GatewayService; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.core.upgrade.IndexUpgradeCheckVersion; import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -54,18 +59,18 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** * Manages the lifecycle of a single index, its template, mapping and data upgrades/migrations. */ -public class SecurityIndexManager extends AbstractComponent { +public class SecurityIndexManager extends AbstractComponent implements ClusterStateListener { public static final String INTERNAL_SECURITY_INDEX = ".security-" + IndexUpgradeCheckVersion.UPRADE_VERSION; public static final int INTERNAL_INDEX_FORMAT = 6; public static final String SECURITY_VERSION_STRING = "security-version"; public static final String TEMPLATE_VERSION_PATTERN = Pattern.quote("${security.template.version}"); public static final String SECURITY_TEMPLATE_NAME = "security-index-template"; + public static final String SECURITY_INDEX_NAME = ".security"; private final String indexName; private final Client client; @@ -74,10 +79,15 @@ public class SecurityIndexManager extends AbstractComponent { private volatile State indexState = new State(false, false, false, false, null, null); - public SecurityIndexManager(Settings settings, Client client, String indexName) { + public SecurityIndexManager(Settings settings, Client client, String indexName, ClusterService clusterService) { super(settings); this.client = client; this.indexName = indexName; + clusterService.addListener(this); + } + + public static List<String> indexNames() { + return Collections.unmodifiableList(Arrays.asList(SECURITY_INDEX_NAME, INTERNAL_SECURITY_INDEX)); } public boolean checkMappingVersion(Predicate<Version> requiredVersion) { @@ -115,7 +125,14 @@ public void addIndexStateListener(BiConsumer<State, State> listener) { stateChangeListeners.add(listener); } + @Override public void clusterChanged(ClusterChangedEvent event) { + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // wait until the gateway has recovered from disk, otherwise we think we don't have the + // .security index but it may not have been restored from the cluster state on disk yet + logger.debug("security index manager waiting until state has been recovered"); + return; + } final State previousState = indexState; final IndexMetaData indexMetaData = resolveConcreteIndex(indexName, event.state().metaData()); final boolean indexExists = indexMetaData != null; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java index 3c3eddfc14c08..d269de25c612d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java @@ -7,16 +7,14 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; -import org.elasticsearch.common.network.NetworkModule;
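As an aside on the SecurityIndexManager change above: the manager now registers itself as a ClusterStateListener and deliberately ignores cluster state events until the gateway has recovered. A minimal sketch of that guard, for illustration only (not part of this patch; the class name is invented):

    import org.elasticsearch.cluster.ClusterChangedEvent;
    import org.elasticsearch.cluster.ClusterStateListener;
    import org.elasticsearch.gateway.GatewayService;

    // Mirrors the guard in SecurityIndexManager.clusterChanged: while the
    // STATE_NOT_RECOVERED_BLOCK is present, index metadata may simply not have
    // been restored from disk yet, so nothing is concluded from its absence.
    class RecoveryAwareListener implements ClusterStateListener {
        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
                return; // state not recovered yet; wait for a later event
            }
            // safe to inspect event.state().metaData() from here on
        }
    }

-import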
org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.Before; import org.junit.BeforeClass; @@ -57,7 +55,7 @@ public void setupForTests() { logger.debug("--> created role [{}]", role); } - ensureGreen(SecurityLifecycleService.SECURITY_INDEX_NAME); + ensureGreen(SecurityIndexManager.SECURITY_INDEX_NAME); // warm up the caches on every node for (NativeRolesStore rolesStore : internalCluster().getInstances(NativeRolesStore.class)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index bc19df6185d63..00b46b332cb7c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -64,7 +64,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsCollectionContaining.hasItem; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 988f60fe57e4f..190c8703955b1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -62,7 +62,7 @@ import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_INDEX_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 52a2e537d8db5..09a48a0eb1370 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -57,7 +57,6 @@ import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.UserToken; @@ -161,16 +160,14 @@ void doExecute(Action action, Request request } }; - final SecurityLifecycleService lifecycleService = mock(SecurityLifecycleService.class); final SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); doAnswer(inv -> { ((Runnable) inv.getArguments()[1]).run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - tokenService = new TokenService(settings, Clock.systemUTC(), client, lifecycleService, clusterService); + tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 93e6ebf2861cf..eca52831d9adc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -47,7 +47,6 @@ import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.UserToken; @@ -173,16 +172,14 @@ public void setup() throws Exception { return Void.TYPE; }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), any(ActionListener.class)); - final SecurityLifecycleService lifecycleService = mock(SecurityLifecycleService.class); final SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); doAnswer(inv -> { ((Runnable) inv.getArguments()[1]).run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - tokenService = new TokenService(settings, Clock.systemUTC(), client, lifecycleService, clusterService); + tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); final TransportService transportService = new TransportService(Settings.EMPTY, null, null, 
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 02af431f8978b..6750560b0b0d2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealmTests; @@ -76,13 +75,11 @@ public void maybeEnableAnonymous() { public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); - SecurityLifecycleService securityLifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); AnonymousUser anonymousUser = new AnonymousUser(settings); ReservedRealm reservedRealm = - new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, new ThreadContext(Settings.EMPTY)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), @@ -148,15 +145,13 @@ public void onFailure(Exception e) { public void testReservedUsersOnly() { NativeUsersStore usersStore = mock(NativeUsersStore.class); - SecurityLifecycleService securityLifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.checkMappingVersion(any())).thenReturn(true); ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); ReservedRealm reservedRealm = - new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); final Collection allReservedUsers = userFuture.actionGet(); @@ -198,13 +193,11 @@ public void testGetAllUsers() { final List storeUsers = randomFrom(Collections.emptyList(), Collections.singletonList(new User("joe")), Arrays.asList(new User("jane"), new User("fred")), randomUsers()); NativeUsersStore usersStore = 
mock(NativeUsersStore.class); - SecurityLifecycleService securityLifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), - securityLifecycleService, new ThreadContext(Settings.EMPTY)); + securityIndex, new ThreadContext(Settings.EMPTY)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 7b26e605207a2..65cf74971a55c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealmTests; @@ -118,14 +117,12 @@ public void onFailure(Exception e) { public void testReservedUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); - SecurityLifecycleService securityLifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); Settings settings = Settings.builder().put("path.home", createTempDir()).build(); ReservedRealm reservedRealm = new ReservedRealm(TestEnvironment.newEnvironment(settings), settings, usersStore, - new AnonymousUser(settings), securityLifecycleService, new ThreadContext(settings)); + new AnonymousUser(settings), securityIndex, new ThreadContext(settings)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0])); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 41b765cb33322..cd685b8f34c28 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -64,7 +64,6 @@ import 
org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.AuthenticationService.Authenticator; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; @@ -125,7 +124,6 @@ public class AuthenticationServiceTests extends ESTestCase { private ThreadPool threadPool; private ThreadContext threadContext; private TokenService tokenService; - private SecurityLifecycleService lifecycleService; private SecurityIndexManager securityIndex; private Client client; private InetSocketAddress remoteAddress; @@ -182,16 +180,14 @@ licenseState, threadContext, mock(ReservedRealm.class), Arrays.asList(firstRealm .setId((String) invocationOnMock.getArguments()[2]); return builder; }).when(client).prepareGet(anyString(), anyString(), anyString()); - lifecycleService = mock(SecurityLifecycleService.class); securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); doAnswer(invocationOnMock -> { Runnable runnable = (Runnable) invocationOnMock.getArguments()[1]; runnable.run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - tokenService = new TokenService(settings, Clock.systemUTC(), client, lifecycleService, clusterService); + tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); service = new AuthenticationService(settings, realms, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings), tokenService); } @@ -929,7 +925,7 @@ public void testInvalidToken() throws Exception { public void testExpiredToken() throws Exception { when(securityIndex.isAvailable()).thenReturn(true); - when(lifecycleService.securityIndex().indexExists()).thenReturn(true); + when(securityIndex.indexExists()).thenReturn(true); User user = new User("_username", "r1"); final Authentication expected = new Authentication(user, new RealmRef("realm", "custom", "node"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java index 47eb1eabae159..0cbeced00b2ab 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -31,18 +30,15 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static 
org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; public class InternalRealmsTests extends ESTestCase { public void testNativeRealmRegistersIndexHealthChangeListener() throws Exception { - SecurityLifecycleService lifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); Map factories = InternalRealms.getFactories(mock(ThreadPool.class), mock(ResourceWatcherService.class), - mock(SSLService.class), mock(NativeUsersStore.class), mock(NativeRoleMappingStore.class), lifecycleService); + mock(SSLService.class), mock(NativeUsersStore.class), mock(NativeRoleMappingStore.class), securityIndex); assertThat(factories, hasEntry(is(NativeRealmSettings.TYPE), any(Realm.Factory.class))); - verifyZeroInteractions(lifecycleService); + verifyZeroInteractions(securityIndex); Settings settings = Settings.builder().put("path.home", createTempDir()).build(); factories.get(NativeRealmSettings.TYPE).create(new RealmConfig("test", Settings.EMPTY, settings, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index a8a0f858d9c03..ec4a97b7f392b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.xpack.core.security.authc.TokenMetaData; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; @@ -147,7 +147,7 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { assertTrue(invalidateResponse.isCreated()); AtomicReference docId = new AtomicReference<>(); assertBusy(() -> { - SearchResponse searchResponse = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + SearchResponse searchResponse = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) .setSize(1) @@ -160,7 +160,7 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { // hack doc to modify the time to the day before Instant dayBefore = created.minus(1L, ChronoUnit.DAYS); assertTrue(Instant.now().isAfter(dayBefore)); - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, "doc", docId.get()) + client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, "doc", docId.get()) .setDoc("expiration_time", dayBefore.toEpochMilli()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -178,8 +178,8 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { assertEquals("token malformed", e.getMessage()); } } - client.admin().indices().prepareRefresh(SecurityLifecycleService.SECURITY_INDEX_NAME).get(); - SearchResponse searchResponse = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + client.admin().indices().prepareRefresh(SecurityIndexManager.SECURITY_INDEX_NAME).get(); + SearchResponse 
searchResponse = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) .setSize(0) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 79a2647997505..af1034d957455 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.xpack.core.security.authc.TokenMetaData; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.AfterClass; import org.junit.Before; @@ -86,7 +85,6 @@ public class TokenServiceTests extends ESTestCase { .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true).build(); private Client client; - private SecurityLifecycleService lifecycleService; private SecurityIndexManager securityIndex; private ClusterService clusterService; private Settings tokenServiceEnabledSettings = Settings.builder() @@ -132,9 +130,7 @@ public void setupClient() { }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), any(ActionListener.class)); // setup lifecycle service - lifecycleService = mock(SecurityLifecycleService.class); securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); doAnswer(invocationOnMock -> { Runnable runnable = (Runnable) invocationOnMock.getArguments()[1]; runnable.run(); @@ -157,7 +153,7 @@ public static void shutdownThreadpool() throws InterruptedException { } public void testAttachAndGetToken() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -177,7 +173,7 @@ public void testAttachAndGetToken() throws Exception { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { // verify a second separate token service with its own salt can also verify - TokenService anotherService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService + TokenService anotherService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex , clusterService); anotherService.refreshMetaData(tokenService.getTokenMetaData()); PlainActionFuture future = new PlainActionFuture<>(); @@ -188,7 +184,7 @@ public void testAttachAndGetToken() throws Exception { } public void testRotateKey() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, 
systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -240,12 +236,12 @@ private void rotateKeys(TokenService tokenService) { } public void testKeyExchange() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); int numRotations = randomIntBetween(1, 5); for (int i = 0; i < numRotations; i++) { rotateKeys(tokenService); } - TokenService otherTokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, + TokenService otherTokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); otherTokenService.refreshMetaData(tokenService.getTokenMetaData()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); @@ -277,7 +273,7 @@ public void testKeyExchange() throws Exception { } public void testPruneKeys() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -338,7 +334,7 @@ public void testPruneKeys() throws Exception { } public void testPassphraseWorks() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -358,7 +354,7 @@ public void testPassphraseWorks() throws Exception { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { // verify a second separate token service with its own passphrase cannot verify - TokenService anotherService = new TokenService(Settings.EMPTY, systemUTC(), client, lifecycleService, + TokenService anotherService = new TokenService(Settings.EMPTY, systemUTC(), client, securityIndex, clusterService); PlainActionFuture<UserToken> future = new PlainActionFuture<>(); anotherService.getAndValidateToken(requestContext, future); @@ -367,7 +363,7 @@ public void testPassphraseWorks() throws Exception { } public void testGetTokenWhenKeyCacheHasExpired() throws Exception { - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService);
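Worth noting for the key-exchange tests above: TokenService instances share key material via TokenMetaData, so a second service can only validate tokens minted by the first after refreshing that metadata. A condensed sketch, for illustration only, assuming the mocked client, securityIndex, and clusterService fixtures that this test class sets up:

    // After any number of rotations on the primary service, propagating its
    // TokenMetaData is what lets the second service verify the same tokens.
    TokenService primary = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
    TokenService secondary = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
    secondary.refreshMetaData(primary.getTokenMetaData());

+ TokenService tokenService = new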
TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); @@ -382,7 +378,7 @@ public void testGetTokenWhenKeyCacheHasExpired() throws Exception { public void testInvalidatedToken() throws Exception { when(securityIndex.indexExists()).thenReturn(true); TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -436,7 +432,7 @@ public void testComputeSecretKeyIsConsistent() throws Exception { public void testTokenExpiry() throws Exception { ClockMock clock = ClockMock.frozen(); - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, clock, client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, clock, client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -488,7 +484,7 @@ public void testTokenServiceDisabled() throws Exception { TokenService tokenService = new TokenService(Settings.builder() .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), false) .build(), - Clock.systemUTC(), client, lifecycleService, clusterService); + Clock.systemUTC(), client, securityIndex, clusterService); IllegalStateException e = expectThrows(IllegalStateException.class, () -> tokenService.createUserToken(null, null, null, null)); assertEquals("tokens are not enabled", e.getMessage()); @@ -530,7 +526,7 @@ public void testMalformedToken() throws Exception { final int numBytes = randomIntBetween(1, TokenService.MINIMUM_BYTES + 32); final byte[] randomBytes = new byte[numBytes]; random().nextBytes(randomBytes); - TokenService tokenService = new TokenService(Settings.EMPTY, systemUTC(), client, lifecycleService, clusterService); + TokenService tokenService = new TokenService(Settings.EMPTY, systemUTC(), client, securityIndex, clusterService); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + Base64.getEncoder().encodeToString(randomBytes)); @@ -544,7 +540,7 @@ public void testMalformedToken() throws Exception { public void testIndexNotAvailable() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, systemUTC(), client, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); PlainActionFuture> tokenFuture = new PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap()); @@ -577,7 +573,7 @@ 
public void testIndexNotAvailable() throws Exception { public void testGetAuthenticationWorksWithExpiredToken() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); UserToken expired = new UserToken(authentication, Instant.now().minus(3L, ChronoUnit.DAYS)); mockGetTokenFromId(expired); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 839b272d115cc..58cb515081a59 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -9,14 +9,13 @@ import joptsimple.OptionSet; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.core.security.authc.support.CharArrays; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.BeforeClass; import java.nio.charset.StandardCharsets; @@ -82,7 +81,7 @@ public void testRetrieveUsers() throws Exception { addedUsers.add(uname); } logger.error("--> waiting for .security index"); - ensureGreen(SecurityLifecycleService.SECURITY_INDEX_NAME); + ensureGreen(SecurityIndexManager.SECURITY_INDEX_NAME); MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); @@ -127,7 +126,7 @@ public void testRetrieveRoles() throws Exception { addedRoles.add(rname); } logger.error("--> waiting for .security index"); - ensureGreen(SecurityLifecycleService.SECURITY_INDEX_NAME); + ensureGreen(SecurityIndexManager.SECURITY_INDEX_NAME); MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 2c11411955a0f..a238576e41323 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -54,7 +54,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static 
org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_SECURITY_INDEX; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 091f6f2ed4571..59244bbc50975 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.Before; @@ -113,7 +112,7 @@ public void testBlankPasswordInIndexImpliesDefaultPassword() throws Exception { values.put(PASSWORD_FIELD, BLANK_PASSWORD); final GetResult result = new GetResult( - SecurityLifecycleService.SECURITY_INDEX_NAME, + SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.RESERVED_USER_TYPE, randomAlphaOfLength(12)), 1L, @@ -182,7 +181,7 @@ public void testVerifyNonExistentUser() throws Exception { nativeUsersStore.verifyPassword(username, password, future); final GetResult getResult = new GetResult( - SecurityLifecycleService.SECURITY_INDEX_NAME, + SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), 1L, @@ -223,7 +222,7 @@ private void respondToGetUserRequest(String username, SecureString password, Str values.put(User.Fields.TYPE.getPreferredName(), NativeUsersStore.USER_DOC_TYPE); final BytesReference source = BytesReference.bytes(jsonBuilder().map(values)); final GetResult getResult = new GetResult( - SecurityLifecycleService.SECURITY_INDEX_NAME, + SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), 1L, @@ -236,9 +235,7 @@ private void respondToGetUserRequest(String username, SecureString password, Str } private NativeUsersStore startNativeUsersStore() { - SecurityLifecycleService securityLifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true); when(securityIndex.isMappingUpToDate()).thenReturn(true); @@ -248,7 +245,7 @@ private NativeUsersStore startNativeUsersStore() { action.run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); - return new NativeUsersStore(Settings.EMPTY, client, securityLifecycleService); + return new NativeUsersStore(Settings.EMPTY, client, securityIndex); } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index 024f8f603c928..9fc52e8af63bc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.UsernamesField; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.Before; @@ -63,15 +62,12 @@ public class ReservedRealmTests extends ESTestCase { private static final SecureString EMPTY_PASSWORD = new SecureString("".toCharArray()); private NativeUsersStore usersStore; - private SecurityLifecycleService securityLifecycleService; private SecurityIndexManager securityIndex; @Before public void setupMocks() throws Exception { usersStore = mock(NativeUsersStore.class); - securityLifecycleService = mock(SecurityLifecycleService.class); securityIndex = mock(SecurityIndexManager.class); - when(securityLifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.checkMappingVersion(any())).thenReturn(true); mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); @@ -82,7 +78,7 @@ public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable UsernamesField.BEATS_NAME); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); @@ -98,7 +94,7 @@ public void testAuthenticationDisabled() throws Throwable { } final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(settings), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(settings), securityIndex, new ThreadContext(Settings.EMPTY)); final User expected = randomReservedUser(true); final String principal = expected.principal(); @@ -120,7 +116,7 @@ public void testAuthenticationDisabledUserWithStoredPassword() throws Throwable private void verifySuccessfulAuthentication(boolean enabled) throws Exception { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); final User expectedUser = randomReservedUser(enabled); final String principal = expectedUser.principal(); final SecureString newPassword = new SecureString("foobar".toCharArray()); @@ -161,7 +157,7 @@ private void verifySuccessfulAuthentication(boolean enabled) throws Exception { public void testLookup() throws Exception { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new 
AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); final User expectedUser = randomReservedUser(true); final String principal = expectedUser.principal(); @@ -186,7 +182,7 @@ public void testLookupDisabled() throws Exception { Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), - securityLifecycleService, new ThreadContext(Settings.EMPTY)); + securityIndex, new ThreadContext(Settings.EMPTY)); final User expectedUser = randomReservedUser(true); final String principal = expectedUser.principal(); @@ -200,7 +196,7 @@ public void testLookupDisabled() throws Exception { public void testLookupThrows() throws Exception { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); final User expectedUser = randomReservedUser(true); final String principal = expectedUser.principal(); when(securityIndex.indexExists()).thenReturn(true); @@ -247,7 +243,7 @@ public void testIsReservedDisabled() { public void testGetUsers() { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); assertThat(userFuture.actionGet(), @@ -262,7 +258,7 @@ public void testGetUsersDisabled() { .build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, - securityLifecycleService, new ThreadContext(Settings.EMPTY)); + securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); if (anonymousEnabled) { @@ -279,7 +275,7 @@ public void testFailedAuthentication() throws Exception { ReservedUserInfo userInfo = new ReservedUserInfo(hash, true, false); mockGetAllReservedUserInfo(usersStore, Collections.singletonMap("elastic", userInfo)); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); if (randomBoolean()) { PlainActionFuture future = new PlainActionFuture<>(); @@ -309,7 +305,7 @@ public void testBootstrapElasticPasswordWorksOnceSecurityIndexExists() throws Ex when(securityIndex.indexExists()).thenReturn(true); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); doAnswer((i) -> { @@ -331,7 +327,7 @@ public void testBootstrapElasticPasswordFailsOnceElasticUserExists() throws Exce when(securityIndex.indexExists()).thenReturn(true); 
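All of these bootstrap-password cases are driven purely by what the mocked security index reports, with no SecurityLifecycleService plumbing left. In sketch form, for illustration only, reusing the usersStore and securityIndex mocks from setupMocks:

    // indexExists() == false: the realm may fall back to the bootstrap password;
    // indexExists() == true:  stored reserved-user documents take precedence.
    when(securityIndex.indexExists()).thenReturn(false);
    final ReservedRealm realm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
            new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY));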
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); SecureString password = new SecureString("password".toCharArray()); doAnswer((i) -> { @@ -358,7 +354,7 @@ public void testBootstrapElasticPasswordWorksBeforeSecurityIndexExists() throws when(securityIndex.indexExists()).thenReturn(false); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); reservedRealm.doAuthenticate(new UsernamePasswordToken(new ElasticUser(true).principal(), @@ -376,7 +372,7 @@ public void testNonElasticUsersCannotUseBootstrapPasswordWhenSecurityIndexExists when(securityIndex.indexExists()).thenReturn(true); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); @@ -398,7 +394,7 @@ public void testNonElasticUsersCannotUseBootstrapPasswordWhenSecurityIndexDoesNo when(securityIndex.indexExists()).thenReturn(false); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); PlainActionFuture listener = new PlainActionFuture<>(); final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 693118c21bde5..2a1c2dabe30b7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression.FieldValue; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -73,12 +72,10 @@ public void testResolveRoles() throws Exception { Arrays.asList("mutants"), Collections.emptyMap(), false); final Client client = mock(Client.class); - final SecurityLifecycleService 
lifecycleService = mock(SecurityLifecycleService.class); SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); - when(lifecycleService.securityIndex()).thenReturn(securityIndex); when(securityIndex.isAvailable()).thenReturn(true); - final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, client, lifecycleService) { + final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, client, securityIndex) { @Override protected void loadMappings(ActionListener> listener) { final List mappings = Arrays.asList(mapping1, mapping2, mapping3, mapping4); @@ -212,7 +209,7 @@ protected void doLookupUser(String username, ActionListener listener) { listener.onResponse(null); } }; - final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, client, mock(SecurityLifecycleService.class)); + final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, client, mock(SecurityIndexManager.class)); store.refreshRealmOnChange(mockRealm); return store; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 3013a7c41c2ac..bcd31c32f7f78 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -140,7 +140,7 @@ import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationExceptionRunAs; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index 4bb8af96ca8c8..1d0e5c179a9cd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -20,8 +20,8 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.List; @@ -81,7 +81,7 @@ public void testSecurityIndicesAreRemovedFromRegularUser() { MetaData metaData = MetaData.builder() .put(new IndexMetaData.Builder("an-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .put(new IndexMetaData.Builder("another-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new 
IndexMetaData.Builder(SecurityLifecycleService.SECURITY_INDEX_NAME).settings(indexSettings) + .put(new IndexMetaData.Builder(SecurityIndexManager.SECURITY_INDEX_NAME).settings(indexSettings) .numberOfShards(1).numberOfReplicas(0).build(), true) .build(); @@ -97,12 +97,12 @@ public void testSecurityIndicesAreNotRemovedFromSuperUsers() { MetaData metaData = MetaData.builder() .put(new IndexMetaData.Builder("an-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .put(new IndexMetaData.Builder("another-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder(SecurityLifecycleService.SECURITY_INDEX_NAME).settings(indexSettings) + .put(new IndexMetaData.Builder(SecurityIndexManager.SECURITY_INDEX_NAME).settings(indexSettings) .numberOfShards(1).numberOfReplicas(0).build(), true) .build(); AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, role, SearchAction.NAME, metaData); List list = authorizedIndices.get(); - assertThat(list, containsInAnyOrder("an-index", "another-index", SecurityLifecycleService.SECURITY_INDEX_NAME)); + assertThat(list, containsInAnyOrder("an-index", "another-index", SecurityIndexManager.SECURITY_INDEX_NAME)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 17d8c754e1642..b080b5924ce7a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -61,10 +61,10 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.ResolvedIndices; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.junit.Before; @@ -75,7 +75,7 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -1199,14 +1199,14 @@ public void testXPackSecurityUserHasAccessToSecurityIndex() { { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, hasItem(SecurityLifecycleService.SECURITY_INDEX_NAME)); + assertThat(indices, hasItem(SecurityIndexManager.SECURITY_INDEX_NAME)); } { IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest(); aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, 
IndicesAliasesAction.NAME); List indices = resolveIndices(aliasesRequest, authorizedIndices).getLocal(); - assertThat(indices, hasItem(SecurityLifecycleService.SECURITY_INDEX_NAME)); + assertThat(indices, hasItem(SecurityIndexManager.SECURITY_INDEX_NAME)); } } @@ -1214,7 +1214,7 @@ public void testXPackUserDoesNotHaveAccessToSecurityIndex() { SearchRequest request = new SearchRequest(); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleService.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } public void testNonXPackUserAccessingSecurityIndex() { @@ -1226,7 +1226,7 @@ public void testNonXPackUserAccessingSecurityIndex() { SearchRequest request = new SearchRequest(); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleService.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } { @@ -1234,7 +1234,7 @@ public void testNonXPackUserAccessingSecurityIndex() { aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias1").index("*")); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, IndicesAliasesAction.NAME); List indices = resolveIndices(aliasesRequest, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleService.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index ab6664b53b0fb..a2c70db3b63e8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -41,8 +41,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.security.SecurityLifecycleService; -import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.junit.After; @@ -58,7 +56,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -189,10 +187,9 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { final ClusterService clusterService = mock(ClusterService.class); final XPackLicenseState licenseState = mock(XPackLicenseState.class); 
final AtomicBoolean methodCalled = new AtomicBoolean(false); - final SecurityLifecycleService securityLifecycleService = - new SecurityLifecycleService(Settings.EMPTY, clusterService, threadPool, client, - mock(IndexAuditTrail.class)); - final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, client, licenseState, securityLifecycleService) { + final SecurityIndexManager securityIndex = + new SecurityIndexManager(Settings.EMPTY, client, SecurityIndexManager.SECURITY_INDEX_NAME, clusterService); + final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, client, licenseState, securityIndex) { @Override void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { if (methodCalled.compareAndSet(false, true)) { @@ -203,7 +200,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final } }; // setup the roles store so the security index exists - securityLifecycleService.clusterChanged(new ClusterChangedEvent( + securityIndex.clusterChanged(new ClusterChangedEvent( "fls_dls_license", getClusterStateWithSecurityIndex(), getEmptyClusterState())); PutRoleRequest putRoleRequest = new PutRoleRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index fe51f2beca34d..b5b67c7e7b2c1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -51,7 +52,7 @@ import org.junit.Before; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_PATTERN; import static org.hamcrest.Matchers.equalTo; @@ -74,6 +75,7 @@ public void setUpManager() { when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(mockClient.threadPool()).thenReturn(threadPool); when(mockClient.settings()).thenReturn(Settings.EMPTY); + final ClusterService clusterService = mock(ClusterService.class); actions = new LinkedHashMap<>(); final Client client = new FilterClient(mockClient) { @@ -88,7 +90,7 @@ void doExecute(Action action, Request request actions.put(action, map); } }; - manager = new SecurityIndexManager(Settings.EMPTY, client, INDEX_NAME); + manager = new SecurityIndexManager(Settings.EMPTY, client, INDEX_NAME, clusterService); } public void testIndexWithUpToDateMappingAndTemplate() throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java
index 63c267eb816fc..aa4982cce3f84 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java
@@ -40,7 +40,7 @@
 import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
 import static java.nio.file.StandardOpenOption.WRITE;
 import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE;
-import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME;
+import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME;
 import static org.junit.Assert.assertEquals;
 
 public class SecurityTestUtils {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java
index 99c2ae635f6a6..e7b31d88eda19 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java
@@ -12,8 +12,8 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField;
 import org.elasticsearch.xpack.core.security.user.XPackUser;
-import org.elasticsearch.xpack.security.SecurityLifecycleService;
 import org.elasticsearch.xpack.security.audit.index.IndexNameResolver;
+import org.elasticsearch.xpack.security.support.SecurityIndexManager;
 import org.hamcrest.Matchers;
 import org.joda.time.DateTime;
@@ -31,8 +31,8 @@ public void testXPackUserCanAccessNonSecurityIndices() {
 
     public void testXPackUserCannotAccessSecurityIndex() {
         final String action = randomFrom(GetAction.NAME, SearchAction.NAME, IndexAction.NAME);
         final Predicate<String> predicate = XPackUser.ROLE.indices().allowedIndicesMatcher(action);
-        assertThat(predicate.test(SecurityLifecycleService.SECURITY_INDEX_NAME), Matchers.is(false));
-        assertThat(predicate.test(SecurityLifecycleService.INTERNAL_SECURITY_INDEX), Matchers.is(false));
+        assertThat(predicate.test(SecurityIndexManager.SECURITY_INDEX_NAME), Matchers.is(false));
+        assertThat(predicate.test(SecurityIndexManager.INTERNAL_SECURITY_INDEX), Matchers.is(false));
     }
 
     public void testXPackUserCanReadAuditTrail() {

From 09329eb84f88c9fd241dcc9d33b3eb7478ee6037 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Tue, 15 May 2018 22:46:46 +0300
Subject: [PATCH 33/74] SQL: Verify GROUP BY ordering on grouped columns (#30585)

Due to the way the composite aggregation works, ordering in GROUP BY queries
can be applied only to grouped columns, which the analyzer verifier now
enforces.
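
For illustration, sketched against the `test` schema already used by
VerifierErrorMessagesTests (columns such as `int`, `bool` and `text`); these
example queries are not part of the change itself:

    SELECT MAX(int) FROM test GROUP BY text ORDER BY text      -- accepted: orders by the grouping key
    SELECT MAX(int) FROM test GROUP BY text ORDER BY MAX(int)  -- rejected: aggregates are not grouping keys
    SELECT AVG(int) a FROM test GROUP BY bool ORDER BY a       -- rejected: the alias resolves to an aggregate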
Fix 29900 --- .../xpack/sql/analysis/analyzer/Verifier.java | 23 ++++++++++++++++--- .../analyzer/VerifierErrorMessagesTests.java | 19 +++++++++++++-- .../planner/VerifierErrorMessagesTests.java | 14 +++++++++++ 3 files changed, 51 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index f5147b84468b7..6f8be61b463fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -211,12 +211,13 @@ static Collection verify(LogicalPlan plan) { /** * Check validity of Aggregate/GroupBy. - * This rule is needed for two reasons: + * This rule is needed for multiple reasons: * 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar) * 2. the order/having might contain a non-grouped attribute. This is typically * caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved * (because the expression gets resolved little by little without being pushed down, * without the Analyzer modifying anything. + * 3. composite agg (used for GROUP BY) allows ordering only on the group keys */ private static boolean checkGroupBy(LogicalPlan p, Set localFailures, Map resolvedFunctions, Set groupingFailures) { @@ -225,7 +226,7 @@ && checkGroupByOrder(p, localFailures, groupingFailures, resolvedFunctions) && checkGroupByHaving(p, localFailures, groupingFailures, resolvedFunctions); } - // check whether an orderBy failed + // check whether an orderBy failed or if it occurs on a non-key private static boolean checkGroupByOrder(LogicalPlan p, Set localFailures, Set groupingFailures, Map functions) { if (p instanceof OrderBy) { @@ -234,7 +235,23 @@ private static boolean checkGroupByOrder(LogicalPlan p, Set localFailur Aggregate a = (Aggregate) o.child(); Map> missing = new LinkedHashMap<>(); - o.order().forEach(oe -> oe.collectFirstChildren(c -> checkGroupMatch(c, oe, a.groupings(), missing, functions))); + o.order().forEach(oe -> { + Expression e = oe.child(); + // cannot order by aggregates (not supported by composite) + if (Functions.isAggregate(e)) { + missing.put(e, oe); + return; + } + + // make sure to compare attributes directly + if (Expressions.anyMatch(a.groupings(), + g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) { + return; + } + + // nothing matched, cannot group by it + missing.put(e, oe); + }); if (!missing.isEmpty()) { String plural = missing.size() > 1 ? 
"s" : StringUtils.EMPTY; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 355c4d2f7b763..60875e0194a0c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -111,7 +111,7 @@ public void testGroupByOrderByNonGrouped() { } public void testGroupByOrderByScalarOverNonGrouped() { - assertEquals("1:50: Cannot order by non-grouped column [date], expected [text]", + assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", verify("SELECT MAX(int) FROM test GROUP BY text ORDER BY YEAR(date)")); } @@ -144,4 +144,19 @@ public void testUnsupportedType() { assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported", verify("SELECT unsupported FROM test")); } -} + + public void testGroupByOrderByNonKey() { + assertEquals("1:52: Cannot order by non-grouped column [a], expected [bool]", + verify("SELECT AVG(int) a FROM test GROUP BY bool ORDER BY a")); + } + + public void testGroupByOrderByFunctionOverKey() { + assertEquals("1:44: Cannot order by non-grouped column [MAX(int)], expected [int]", + verify("SELECT int FROM test GROUP BY int ORDER BY MAX(int)")); + } + + public void testGroupByOrderByScore() { + assertEquals("1:44: Cannot order by non-grouped column [SCORE()], expected [int]", + verify("SELECT int FROM test GROUP BY int ORDER BY SCORE()")); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java index 154885261fdb8..5d6f479b7558d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java @@ -49,4 +49,18 @@ public void testMultiGroupBy() { assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]", verify("SELECT bool FROM test GROUP BY bool, keyword")); } + + // + // TODO potential improvements + // + // regarding resolution + // public void testGroupByOrderByKeyAlias() { + // assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported", + // verify("SELECT int i FROM test GROUP BY int ORDER BY i")); + // } + // + // public void testGroupByAlias() { + // assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported", + // verify("SELECT int i FROM test GROUP BY i ORDER BY int")); + // } } From 03dd2ab499742336263d09c9b69f62a8a76413da Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 15 May 2018 22:49:05 +0300 Subject: [PATCH 34/74] SQL: eliminate disabled tests --- .../sql/planner/VerifierErrorMessagesTests.java | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java index 5d6f479b7558d..154885261fdb8 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java @@ -49,18 +49,4 @@ public void testMultiGroupBy() { assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]", verify("SELECT bool FROM test GROUP BY bool, keyword")); } - - // - // TODO potential improvements - // - // regarding resolution - // public void testGroupByOrderByKeyAlias() { - // assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported", - // verify("SELECT int i FROM test GROUP BY int ORDER BY i")); - // } - // - // public void testGroupByAlias() { - // assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported", - // verify("SELECT int i FROM test GROUP BY i ORDER BY int")); - // } } From 4f9dd3716910b1f896b72dc5e548b45d1ae5f18a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 15 May 2018 13:07:58 -0700 Subject: [PATCH 35/74] Add support for search templates to the high-level REST client. (#30473) --- client/rest-high-level/build.gradle | 1 + .../client/RequestConverters.java | 33 ++- .../client/RestHighLevelClient.java | 28 +++ .../client/RequestConvertersTests.java | 125 ++++++++--- .../org/elasticsearch/client/SearchIT.java | 104 +++++++++ .../documentation/SearchDocumentationIT.java | 129 +++++++++++ .../search/search-template.asciidoc | 117 ++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../RestMultiSearchTemplateAction.java | 2 +- .../RestRenderSearchTemplateAction.java | 2 +- .../mustache/RestSearchTemplateAction.java | 33 +-- .../mustache/SearchTemplateRequest.java | 85 ++++++- .../mustache/SearchTemplateResponse.java | 33 ++- .../script/mustache/SearchTemplateIT.java | 6 +- .../mustache/SearchTemplateRequestTests.java | 152 +++++-------- .../SearchTemplateRequestXContentTests.java | 197 ++++++++++++++++ .../mustache/SearchTemplateResponseTests.java | 211 ++++++++++++++++++ .../search/RandomSearchRequestGenerator.java | 2 +- 18 files changed, 1090 insertions(+), 172 deletions(-) create mode 100644 docs/java-rest/high-level/search/search-template.asciidoc create mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java create mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index c273e76a92aed..222de9608aeb9 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -40,6 +40,7 @@ dependencies { compile "org.elasticsearch.plugin:parent-join-client:${version}" compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" compile "org.elasticsearch.plugin:rank-eval-client:${version}" + compile "org.elasticsearch.plugin:lang-mustache-client:${version}" testCompile "org.elasticsearch.client:test:${version}" testCompile "org.elasticsearch.test:framework:${version}" diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 2e7b4ba74cc39..310aafcb6b817 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -80,6 +80,7 
@@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.ByteArrayOutputStream; @@ -458,6 +459,15 @@ static Request search(SearchRequest searchRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), "_search")); Params params = new Params(request); + addSearchRequestParams(params, searchRequest); + + if (searchRequest.source() != null) { + request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE)); + } + return request; + } + + private static void addSearchRequestParams(Params params, SearchRequest searchRequest) { params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); params.withRouting(searchRequest.routing()); params.withPreference(searchRequest.preference()); @@ -473,11 +483,6 @@ static Request search(SearchRequest searchRequest) throws IOException { if (searchRequest.scroll() != null) { params.putParam("scroll", searchRequest.scroll().keepAlive()); } - - if (searchRequest.source() != null) { - request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE)); - } - return request; } static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException { @@ -507,6 +512,24 @@ static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOExcep return request; } + static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throws IOException { + Request request; + + if (searchTemplateRequest.isSimulate()) { + request = new Request(HttpGet.METHOD_NAME, "_render/template"); + } else { + SearchRequest searchRequest = searchTemplateRequest.getRequest(); + String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template"); + request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params params = new Params(request); + addSearchRequestParams(params, searchRequest); + } + + request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request existsAlias(GetAliasesRequest getAliasesRequest) { if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) && (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 1985d6bd06dd4..5dbf2709d9988 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -64,6 +64,8 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.mustache.SearchTemplateRequest; +import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.adjacency.ParsedAdjacencyMatrix; @@ -501,6 +503,32 @@ public final void clearScrollAsync(ClearScrollRequest clearScrollRequest, 
listener, emptySet(), headers); } + /** + * Executes a request using the Search Template API. + * + * See Search Template API + * on elastic.co. + */ + public final SearchTemplateResponse searchTemplate(SearchTemplateRequest searchTemplateRequest, + Header... headers) throws IOException { + return performRequestAndParseEntity(searchTemplateRequest, RequestConverters::searchTemplate, + SearchTemplateResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a request using the Search Template API + * + * See Search Template API + * on elastic.co. + */ + public final void searchTemplateAsync(SearchTemplateRequest searchTemplateRequest, + ActionListener listener, + Header... headers) { + performRequestAsyncAndParseEntity(searchTemplateRequest, RequestConverters::searchTemplate, + SearchTemplateResponse::fromXContent, listener, emptySet(), headers); + } + + /** * Executes a request using the Ranking Evaluation API. * diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 2d4ef8b6413d9..9c75d67e04304 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -95,6 +95,8 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; @@ -1011,36 +1013,7 @@ public void testSearch() throws Exception { searchRequest.types(types); Map expectedParams = new HashMap<>(); - expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true"); - if (randomBoolean()) { - searchRequest.routing(randomAlphaOfLengthBetween(3, 10)); - expectedParams.put("routing", searchRequest.routing()); - } - if (randomBoolean()) { - searchRequest.preference(randomAlphaOfLengthBetween(3, 10)); - expectedParams.put("preference", searchRequest.preference()); - } - if (randomBoolean()) { - searchRequest.searchType(randomFrom(SearchType.values())); - } - expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT)); - if (randomBoolean()) { - searchRequest.requestCache(randomBoolean()); - expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache())); - } - if (randomBoolean()) { - searchRequest.allowPartialSearchResults(randomBoolean()); - expectedParams.put("allow_partial_search_results", Boolean.toString(searchRequest.allowPartialSearchResults())); - } - if (randomBoolean()) { - searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE)); - } - expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize())); - if (randomBoolean()) { - searchRequest.scroll(randomTimeValue()); - expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep()); - } - + setRandomSearchParams(searchRequest, expectedParams); setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -1189,6 +1162,65 @@ public 
void testClearScroll() throws IOException { assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } + public void testSearchTemplate() throws Exception { + // Create a random request. + String[] indices = randomIndicesNames(0, 5); + SearchRequest searchRequest = new SearchRequest(indices); + + Map expectedParams = new HashMap<>(); + setRandomSearchParams(searchRequest, expectedParams); + setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams); + + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest); + + searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}"); + searchTemplateRequest.setScriptType(ScriptType.INLINE); + searchTemplateRequest.setProfile(randomBoolean()); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "name"); + scriptParams.put("value", "soren"); + searchTemplateRequest.setScriptParams(scriptParams); + + // Verify that the resulting REST request looks as expected. + Request request = RequestConverters.searchTemplate(searchTemplateRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_search/template"); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(searchTemplateRequest, request.getEntity()); + } + + public void testRenderSearchTemplate() throws Exception { + // Create a simple request. + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(); + searchTemplateRequest.setSimulate(true); // Setting simulate true means the template should only be rendered. + + searchTemplateRequest.setScript("template1"); + searchTemplateRequest.setScriptType(ScriptType.STORED); + searchTemplateRequest.setProfile(randomBoolean()); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "name"); + scriptParams.put("value", "soren"); + searchTemplateRequest.setScriptParams(scriptParams); + + // Verify that the resulting REST request looks as expected. + Request request = RequestConverters.searchTemplate(searchTemplateRequest); + String endpoint = "_render/template"; + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(endpoint, request.getEndpoint()); + assertEquals(Collections.emptyMap(), request.getParameters()); + assertToXContentBody(searchTemplateRequest, request.getEntity()); + } + public void testExistsAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); String[] indices = randomBoolean() ? 
null : randomIndicesNames(0, 5); @@ -1662,6 +1694,39 @@ private static void randomizeFetchSourceContextParams(Consumer expectedParams) { + expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true"); + if (randomBoolean()) { + searchRequest.routing(randomAlphaOfLengthBetween(3, 10)); + expectedParams.put("routing", searchRequest.routing()); + } + if (randomBoolean()) { + searchRequest.preference(randomAlphaOfLengthBetween(3, 10)); + expectedParams.put("preference", searchRequest.preference()); + } + if (randomBoolean()) { + searchRequest.searchType(randomFrom(SearchType.values())); + } + expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT)); + if (randomBoolean()) { + searchRequest.requestCache(randomBoolean()); + expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache())); + } + if (randomBoolean()) { + searchRequest.allowPartialSearchResults(randomBoolean()); + expectedParams.put("allow_partial_search_results", Boolean.toString(searchRequest.allowPartialSearchResults())); + } + if (randomBoolean()) { + searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE)); + } + expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize())); + if (randomBoolean()) { + searchRequest.scroll(randomTimeValue()); + expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep()); + } + } + private static void setRandomIndicesOptions(Consumer setter, Supplier getter, Map expectedParams) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 549b4ce0a85c5..e147642fc73bd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -38,8 +38,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.ScriptQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; @@ -48,6 +51,8 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.SearchTemplateRequest; +import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.range.Range; @@ -69,10 +74,12 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -733,6 +740,103 @@ public void testMultiSearch_failure() throws Exception { 
assertThat(multiSearchResponse.getResponses()[1].getResponse(), nullValue()); } + public void testSearchTemplate() throws IOException { + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(); + searchTemplateRequest.setRequest(new SearchRequest("index")); + + searchTemplateRequest.setScriptType(ScriptType.INLINE); + searchTemplateRequest.setScript( + "{" + + " \"query\": {" + + " \"match\": {" + + " \"num\": {{number}}" + + " }" + + " }" + + "}"); + + Map scriptParams = new HashMap<>(); + scriptParams.put("number", 10); + searchTemplateRequest.setScriptParams(scriptParams); + + searchTemplateRequest.setExplain(true); + searchTemplateRequest.setProfile(true); + + SearchTemplateResponse searchTemplateResponse = execute(searchTemplateRequest, + highLevelClient()::searchTemplate, + highLevelClient()::searchTemplateAsync); + + assertNull(searchTemplateResponse.getSource()); + + SearchResponse searchResponse = searchTemplateResponse.getResponse(); + assertNotNull(searchResponse); + + assertEquals(1, searchResponse.getHits().totalHits); + assertEquals(1, searchResponse.getHits().getHits().length); + assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f)); + + SearchHit hit = searchResponse.getHits().getHits()[0]; + assertNotNull(hit.getExplanation()); + + assertFalse(searchResponse.getProfileResults().isEmpty()); + } + + public void testNonExistentSearchTemplate() { + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(); + searchTemplateRequest.setRequest(new SearchRequest("index")); + + searchTemplateRequest.setScriptType(ScriptType.STORED); + searchTemplateRequest.setScript("non-existent"); + searchTemplateRequest.setScriptParams(Collections.emptyMap()); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> execute(searchTemplateRequest, + highLevelClient()::searchTemplate, + highLevelClient()::searchTemplateAsync)); + + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } + + public void testRenderSearchTemplate() throws IOException { + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(); + + searchTemplateRequest.setScriptType(ScriptType.INLINE); + searchTemplateRequest.setScript( + "{" + + " \"query\": {" + + " \"match\": {" + + " \"num\": {{number}}" + + " }" + + " }" + + "}"); + + Map scriptParams = new HashMap<>(); + scriptParams.put("number", 10); + searchTemplateRequest.setScriptParams(scriptParams); + + // Setting simulate true causes the template to only be rendered. 
+ searchTemplateRequest.setSimulate(true); + + SearchTemplateResponse searchTemplateResponse = execute(searchTemplateRequest, + highLevelClient()::searchTemplate, + highLevelClient()::searchTemplateAsync); + assertNull(searchTemplateResponse.getResponse()); + + BytesReference expectedSource = BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("match") + .field("num", 10) + .endObject() + .endObject() + .endObject()); + + BytesReference actualSource = searchTemplateResponse.getSource(); + assertNotNull(actualSource); + + assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON); + } + public void testFieldCaps() throws IOException { FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() .indices("index1", "index2") diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 8a12016025c3e..463c5f7d12f5e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -41,7 +41,11 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.TimeValue; @@ -60,6 +64,9 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RatedSearchHit; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.SearchTemplateRequest; +import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -92,6 +99,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -706,9 +714,130 @@ public void onFailure(Exception e) { } } + public void testSearchTemplateWithInlineScript() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + + // tag::search-template-request-inline + SearchTemplateRequest request = new SearchTemplateRequest(); + request.setRequest(new SearchRequest("posts")); // <1> + + request.setScriptType(ScriptType.INLINE); + request.setScript( // <2> + "{" + + " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," + + " \"size\" : \"{{size}}\"" + + "}"); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "title"); + scriptParams.put("value", "elasticsearch"); + scriptParams.put("size", 5); + request.setScriptParams(scriptParams); // <3> + // end::search-template-request-inline + + // tag::search-template-response + SearchTemplateResponse response = client.searchTemplate(request); + SearchResponse searchResponse = response.getResponse(); + // end::search-template-response 
+ + assertNotNull(searchResponse); + assertTrue(searchResponse.getHits().totalHits > 0); + + // tag::render-search-template-request + request.setSimulate(true); // <1> + // end::render-search-template-request + + // tag::render-search-template-response + SearchTemplateResponse renderResponse = client.searchTemplate(request); + BytesReference source = renderResponse.getSource(); // <1> + // end::render-search-template-response + + assertNotNull(source); + assertEquals(( + "{" + + " \"size\" : \"5\"," + + " \"query\": { \"match\" : { \"title\" : \"elasticsearch\" } }" + + "}").replaceAll("\\s+", ""), source.utf8ToString()); + } + + public void testSearchTemplateWithStoredScript() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + RestClient restClient = client(); + + // tag::register-script + Request scriptRequest = new Request("POST", "_scripts/title_search"); + scriptRequest.setJsonEntity( + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," + + " \"size\" : \"{{size}}\"" + + " }" + + " }" + + "}"); + Response scriptResponse = restClient.performRequest(scriptRequest); + // end::register-script + assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode()); + + // tag::search-template-request-stored + SearchTemplateRequest request = new SearchTemplateRequest(); + request.setRequest(new SearchRequest("posts")); + + request.setScriptType(ScriptType.STORED); + request.setScript("title_search"); + + Map params = new HashMap<>(); + params.put("field", "title"); + params.put("value", "elasticsearch"); + params.put("size", 5); + request.setScriptParams(params); + // end::search-template-request-stored + + // tag::search-template-request-options + request.setExplain(true); + request.setProfile(true); + // end::search-template-request-options + + // tag::search-template-execute + SearchTemplateResponse response = client.searchTemplate(request); + // end::search-template-execute + + SearchResponse searchResponse = response.getResponse(); + assertNotNull(searchResponse); + assertTrue(searchResponse.getHits().totalHits > 0); + + // tag::search-template-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(SearchTemplateResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::search-template-execute-listener + + // Replace the empty listener by a blocking listener for tests. 
+        CountDownLatch latch = new CountDownLatch(1);
+        listener = new LatchedActionListener<>(listener, latch);
+
+        // tag::search-template-execute-async
+        client.searchTemplateAsync(request, listener); // <1>
+        // end::search-template-execute-async
+
+        assertTrue(latch.await(30L, TimeUnit.SECONDS));
+    }
+
     public void testFieldCaps() throws Exception {
         indexSearchTestData();
         RestHighLevelClient client = highLevelClient();
+
         // tag::field-caps-request
         FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
             .fields("user")
diff --git a/docs/java-rest/high-level/search/search-template.asciidoc b/docs/java-rest/high-level/search/search-template.asciidoc
new file mode 100644
index 0000000000000..3f0dfb8ab28e0
--- /dev/null
+++ b/docs/java-rest/high-level/search/search-template.asciidoc
@@ -0,0 +1,117 @@
+[[java-rest-high-search-template]]
+=== Search Template API
+
+The search template API allows for searches to be executed from a template based
+on the mustache language, and also for previewing rendered templates.
+
+[[java-rest-high-search-template-request]]
+==== Search Template Request
+
+===== Inline Templates
+
+In the most basic form of request, the search template is specified inline:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-inline]
+--------------------------------------------------
+<1> The search is executed against the `posts` index.
+<2> The template defines the structure of the search source. It is passed
+as a string because mustache templates are not always valid JSON.
+<3> Before running the search, the template is rendered with the provided parameters.
+
+===== Registered Templates
+
+Search templates can be registered in advance through the stored scripts API. Note that
+the stored scripts API is not yet available in the high-level REST client, so in this
+example we use the low-level REST client.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[register-script]
+--------------------------------------------------
+
+Instead of providing an inline script, we can refer to this registered template in the request:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-stored]
+--------------------------------------------------
+
+===== Rendering Templates
+
+Given parameter values, a template can be rendered without executing a search:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[render-search-template-request]
+--------------------------------------------------
+<1> Setting `simulate` to `true` causes the search template to only be rendered.
+
+Both inline and pre-registered templates can be rendered.
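+
+For reference, here is a minimal end-to-end sketch of a render-only request,
+assuming an initialized `RestHighLevelClient` named `client` (this snippet is
+assembled from the calls shown above rather than pulled from the doc-tests):
+
+["source","java"]
+--------------------------------------------------
+SearchTemplateRequest request = new SearchTemplateRequest();
+request.setScriptType(ScriptType.INLINE);
+request.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }}");
+
+Map<String, Object> params = new HashMap<>();
+params.put("field", "title");
+params.put("value", "elasticsearch");
+request.setScriptParams(params);
+
+request.setSimulate(true); // render the template only, do not execute the search
+
+SearchTemplateResponse response = client.searchTemplate(request);
+BytesReference source = response.getSource(); // the rendered search source
+--------------------------------------------------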
+
+===== Optional Arguments
+
+As in standard search requests, the `explain` and `profile` options are supported:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-options]
+--------------------------------------------------
+
+===== Additional References
+
+The {ref}/search-template.html[Search Template documentation] contains further examples of how search requests can be templated.
+
+[[java-rest-high-search-template-sync]]
+==== Synchronous Execution
+
+The `searchTemplate` method executes the request synchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute]
+--------------------------------------------------
+
+==== Asynchronous Execution
+
+A search template request can be executed asynchronously through the `searchTemplateAsync`
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute-async]
+--------------------------------------------------
+<1> The `SearchTemplateRequest` to execute and the `ActionListener` to call when the execution completes.
+
+The asynchronous method does not block and returns immediately. Once the request completes, the
+`ActionListener` is called back using the `onResponse` method if the execution completed successfully,
+or using the `onFailure` method if it failed.
+
+A typical listener for `SearchTemplateResponse` is constructed as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed.
+<2> Called when the whole `SearchTemplateRequest` fails.
+
+==== Search Template Response
+
+For a standard search template request, the response contains a `SearchResponse` object
+with the result of executing the search:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-response]
+--------------------------------------------------
+
+If `simulate` was set to `true` in the request, then the response
+will contain the rendered search source instead of a `SearchResponse`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[render-search-template-response]
+--------------------------------------------------
+<1> The rendered source in bytes, in our example `{"query": { "match" : { "title" : "elasticsearch" }}, "size" : 5}`.
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 2dee4643e73eb..62e65ec650bca 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -31,6 +31,7 @@ The Java High Level REST Client supports the following Search APIs: * <> * <> * <> +* <> * <> * <> * <> @@ -38,6 +39,7 @@ The Java High Level REST Client supports the following Search APIs: include::search/search.asciidoc[] include::search/scroll.asciidoc[] include::search/multi-search.asciidoc[] +include::search/search-template.asciidoc[] include::search/field-caps.asciidoc[] include::search/rank-eval.asciidoc[] diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index fd797c4340a8f..9969e6b38e54a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -77,7 +77,7 @@ public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, b RestMultiSearchAction.parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, bytes) -> { - SearchTemplateRequest searchTemplateRequest = RestSearchTemplateAction.parse(bytes); + SearchTemplateRequest searchTemplateRequest = SearchTemplateRequest.fromXContent(bytes); if (searchTemplateRequest.getScript() != null) { searchTemplateRequest.setRequest(searchRequest); multiRequest.add(searchTemplateRequest); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java index d8c67839cb80f..75acc09424359 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java @@ -52,7 +52,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client // Creates the render template request SearchTemplateRequest renderRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { - renderRequest = RestSearchTemplateAction.parse(parser); + renderRequest = SearchTemplateRequest.fromXContent(parser); } renderRequest.setSimulate(true); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 7ab9aa6003334..f42afcc19b80f 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -47,33 +47,6 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Collections.singleton(RestSearchAction.TYPED_KEYS_PARAM); - private static final ObjectParser PARSER; - static { - PARSER = new ObjectParser<>("search_template"); - PARSER.declareField((parser, request, s) -> - request.setScriptParams(parser.map()) - , new ParseField("params"), ObjectParser.ValueType.OBJECT); - 
PARSER.declareString((request, s) -> { - request.setScriptType(ScriptType.STORED); - request.setScript(s); - }, new ParseField("id")); - PARSER.declareBoolean(SearchTemplateRequest::setExplain, new ParseField("explain")); - PARSER.declareBoolean(SearchTemplateRequest::setProfile, new ParseField("profile")); - PARSER.declareField((parser, request, value) -> { - request.setScriptType(ScriptType.INLINE); - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); - } catch (IOException e) { - throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); - } - } else { - request.setScript(parser.text()); - } - }, new ParseField("source", "inline", "template"), ObjectParser.ValueType.OBJECT_OR_STRING); - } - public RestSearchTemplateAction(Settings settings, RestController controller) { super(settings); @@ -99,17 +72,13 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client // Creates the search template request SearchTemplateRequest searchTemplateRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { - searchTemplateRequest = PARSER.parse(parser, new SearchTemplateRequest(), null); + searchTemplateRequest = SearchTemplateRequest.fromXContent(parser); } searchTemplateRequest.setRequest(searchRequest); return channel -> client.execute(SearchTemplateAction.INSTANCE, searchTemplateRequest, new RestStatusToXContentListener<>(channel)); } - public static SearchTemplateRequest parse(XContentParser parser) throws IOException { - return PARSER.parse(parser, new SearchTemplateRequest(), null); - } - @Override protected Set responseParams() { return RESPONSE_PARAMS; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java index b0186b7b0e3cf..da3cc3688149c 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java @@ -23,19 +23,28 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.ScriptType; import java.io.IOException; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a search based on a search template. 
*/ -public class SearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest { +public class SearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest, ToXContentObject { private SearchRequest request; private boolean simulate = false; @@ -60,6 +69,24 @@ public SearchRequest getRequest() { return request; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchTemplateRequest request1 = (SearchTemplateRequest) o; + return simulate == request1.simulate && + explain == request1.explain && + profile == request1.profile && + Objects.equals(request, request1.request) && + scriptType == request1.scriptType && + Objects.equals(script, request1.script) && + Objects.equals(scriptParams, request1.scriptParams); + } + + @Override + public int hashCode() { + return Objects.hash(request, simulate, explain, profile, scriptType, script, scriptParams); + } public boolean isSimulate() { return simulate; @@ -134,6 +161,62 @@ public ActionRequestValidationException validate() { return validationException; } + private static ParseField ID_FIELD = new ParseField("id"); + private static ParseField SOURCE_FIELD = new ParseField("source", "inline", "template"); + + private static ParseField PARAMS_FIELD = new ParseField("params"); + private static ParseField EXPLAIN_FIELD = new ParseField("explain"); + private static ParseField PROFILE_FIELD = new ParseField("profile"); + + private static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("search_template"); + PARSER.declareField((parser, request, s) -> + request.setScriptParams(parser.map()) + , PARAMS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareString((request, s) -> { + request.setScriptType(ScriptType.STORED); + request.setScript(s); + }, ID_FIELD); + PARSER.declareBoolean(SearchTemplateRequest::setExplain, EXPLAIN_FIELD); + PARSER.declareBoolean(SearchTemplateRequest::setProfile, PROFILE_FIELD); + PARSER.declareField((parser, request, value) -> { + request.setScriptType(ScriptType.INLINE); + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); + } catch (IOException e) { + throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); + } + } else { + request.setScript(parser.text()); + } + }, SOURCE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + } + + public static SearchTemplateRequest fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, new SearchTemplateRequest(), null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (scriptType == ScriptType.STORED) { + builder.field(ID_FIELD.getPreferredName(), script); + } else if (scriptType == ScriptType.INLINE) { + builder.field(SOURCE_FIELD.getPreferredName(), script); + } else { + throw new UnsupportedOperationException("Unrecognized script type [" + scriptType + "]."); + } + + return builder.field(PARAMS_FIELD.getPreferredName(), scriptParams) + .field(EXPLAIN_FIELD.getPreferredName(), explain) + .field(PROFILE_FIELD.getPreferredName(), profile) + .endObject(); + } + @Override public void readFrom(StreamInput in) throws 
IOException { super.readFrom(in); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 792d993915992..500a5a399ef4a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -21,18 +21,23 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.io.InputStream; +import java.util.Map; -public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { +public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { + public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); /** Contains the source of the rendered template **/ private BytesReference source; @@ -77,6 +82,30 @@ public void readFrom(StreamInput in) throws IOException { response = in.readOptionalStreamable(SearchResponse::new); } + public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { + SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); + Map contentAsMap = parser.map(); + + if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) { + Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName()); + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON) + .value(source); + searchTemplateResponse.setSource(BytesReference.bytes(builder)); + } else { + XContentType contentType = parser.contentType(); + XContentBuilder builder = XContentFactory.contentBuilder(contentType) + .map(contentAsMap); + XContentParser searchResponseParser = contentType.xContent().createParser( + parser.getXContentRegistry(), + parser.getDeprecationHandler(), + BytesReference.bytes(builder).streamInput()); + + SearchResponse searchResponse = SearchResponse.fromXContent(searchResponseParser); + searchTemplateResponse.setResponse(searchResponse); + } + return searchTemplateResponse; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (hasResponse()) { @@ -85,7 +114,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); //we can assume the template is always json as we convert it before compiling it try (InputStream stream = source.streamInput()) { - builder.rawField("template_output", stream, XContentType.JSON); + builder.rawField(TEMPLATE_OUTPUT_FIELD.getPreferredName(), stream, XContentType.JSON); } builder.endObject(); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java 
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 1529b655a5042..fe2fedf62b559 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -101,7 +101,7 @@ public void testTemplateQueryAsEscapedString() throws Exception { + " \"size\": 1" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, query)); + SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, query)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); @@ -122,7 +122,7 @@ public void testTemplateQueryAsEscapedStringStartingWithConditionalClause() thro + " \"use_size\": true" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, templateString)); + SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); @@ -143,7 +143,7 @@ public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws + " \"use_size\": true" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, templateString)); + SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java index 9cdca70f0e1a6..7d4a6479727e2 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java @@ -19,117 +19,77 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.xcontent.XContentParseException; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.search.RandomSearchRequestGenerator; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.AbstractStreamableTestCase; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.function.Consumer; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.nullValue; - -public class 
SearchTemplateRequestTests extends ESTestCase { - - public void testParseInlineTemplate() throws Exception { - String source = "{" + - " 'source' : {\n" + - " 'query': {\n" + - " 'terms': {\n" + - " 'status': [\n" + - " '{{#status}}',\n" + - " '{{.}}',\n" + - " '{{/status}}'\n" + - " ]\n" + - " }\n" + - " }\n" + - " }" + - "}"; - - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("{\"query\":{\"terms\":{\"status\":[\"{{#status}}\",\"{{.}}\",\"{{/status}}\"]}}}")); - assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); - assertThat(request.getScriptParams(), nullValue()); - } +public class SearchTemplateRequestTests extends AbstractStreamableTestCase { - public void testParseInlineTemplateWithParams() throws Exception { - String source = "{" + - " 'source' : {" + - " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } }," + - " 'size' : '{{my_size}}'" + - " }," + - " 'params' : {" + - " 'my_field' : 'foo'," + - " 'my_value' : 'bar'," + - " 'my_size' : 5" + - " }" + - "}"; - - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}")); - assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); - assertThat(request.getScriptParams().size(), equalTo(3)); - assertThat(request.getScriptParams(), hasEntry("my_field", "foo")); - assertThat(request.getScriptParams(), hasEntry("my_value", "bar")); - assertThat(request.getScriptParams(), hasEntry("my_size", 5)); + @Override + protected SearchTemplateRequest createBlankInstance() { + return new SearchTemplateRequest(); } - public void testParseInlineTemplateAsString() throws Exception { - String source = "{'source' : '{\\\"query\\\":{\\\"bool\\\":{\\\"must\\\":{\\\"match\\\":{\\\"foo\\\":\\\"{{text}}\\\"}}}}}'}"; - - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("{\"query\":{\"bool\":{\"must\":{\"match\":{\"foo\":\"{{text}}\"}}}}}")); - assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); - assertThat(request.getScriptParams(), nullValue()); + @Override + protected SearchTemplateRequest createTestInstance() { + return createRandomRequest(); } - @SuppressWarnings("unchecked") - public void testParseInlineTemplateAsStringWithParams() throws Exception { - String source = "{'source' : '{\\\"query\\\":{\\\"match\\\":{\\\"{{field}}\\\":\\\"{{value}}\\\"}}}', " + - "'params': {'status': ['pending', 'published']}}"; - - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{field}}\":\"{{value}}\"}}}")); - assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); - assertThat(request.getScriptParams().size(), equalTo(1)); - assertThat(request.getScriptParams(), hasKey("status")); - assertThat((List) request.getScriptParams().get("status"), hasItems("pending", "published")); + @Override + protected SearchTemplateRequest mutateInstance(SearchTemplateRequest instance) throws IOException { + List> mutators = new ArrayList<>(); + + mutators.add(request -> request.setScriptType( + randomValueOtherThan(request.getScriptType(), () -> randomFrom(ScriptType.values())))); + mutators.add(request -> request.setScript( + randomValueOtherThan(request.getScript(), () -> randomAlphaOfLength(50)))); + + mutators.add(request -> { + 
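+            // Copy the current params, then add a key that is guaranteed not to be present
+            // yet, so the mutated request can never compare equal to the original.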
Map mutatedScriptParams = new HashMap<>(request.getScriptParams()); + String newField = randomValueOtherThanMany(mutatedScriptParams::containsKey, () -> randomAlphaOfLength(5)); + mutatedScriptParams.put(newField, randomAlphaOfLength(10)); + request.setScriptParams(mutatedScriptParams); + }); + + mutators.add(request -> request.setProfile(!request.isProfile())); + mutators.add(request -> request.setExplain(!request.isExplain())); + mutators.add(request -> request.setSimulate(!request.isSimulate())); + + mutators.add(request -> request.setRequest( + RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource))); + + SearchTemplateRequest mutatedInstance = copyInstance(instance); + Consumer mutator = randomFrom(mutators); + mutator.accept(mutatedInstance); + return mutatedInstance; } - public void testParseStoredTemplate() throws Exception { - String source = "{'id' : 'storedTemplate'}"; - - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("storedTemplate")); - assertThat(request.getScriptType(), equalTo(ScriptType.STORED)); - assertThat(request.getScriptParams(), nullValue()); - } - public void testParseStoredTemplateWithParams() throws Exception { - String source = "{'id' : 'another_template', 'params' : {'bar': 'foo'}}"; + public static SearchTemplateRequest createRandomRequest() { + SearchTemplateRequest request = new SearchTemplateRequest(); + request.setScriptType(randomFrom(ScriptType.values())); + request.setScript(randomAlphaOfLength(50)); - SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source)); - assertThat(request.getScript(), equalTo("another_template")); - assertThat(request.getScriptType(), equalTo(ScriptType.STORED)); - assertThat(request.getScriptParams().size(), equalTo(1)); - assertThat(request.getScriptParams(), hasEntry("bar", "foo")); - } + Map scriptParams = new HashMap<>(); + for (int i = 0; i < randomInt(10); i++) { + scriptParams.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); + } + request.setScriptParams(scriptParams); - public void testParseWrongTemplate() { - // Unclosed template id - expectThrows(XContentParseException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }"))); - } + request.setExplain(randomBoolean()); + request.setProfile(randomBoolean()); + request.setSimulate(randomBoolean()); - /** - * Creates a {@link XContentParser} with the given String while replacing single quote to double quotes. - */ - private XContentParser newParser(String s) throws IOException { - assertNotNull(s); - return createParser(JsonXContent.jsonXContent, s.replace("'", "\"")); + request.setRequest(RandomSearchRequestGenerator.randomSearchRequest( + SearchSourceBuilder::searchSource)); + return request; } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java new file mode 100644 index 0000000000000..0e9e8ca628975 --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java @@ -0,0 +1,197 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.mustache; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.nullValue; + +public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase { + + @Override + public SearchTemplateRequest createTestInstance() { + return SearchTemplateRequestTests.createRandomRequest(); + } + + @Override + protected SearchTemplateRequest doParseInstance(XContentParser parser) throws IOException { + return SearchTemplateRequest.fromXContent(parser); + } + + /** + * Note that when checking equality for xContent parsing, we omit two parts of the request: + * - The 'simulate' option, since this parameter is not included in the + * request's xContent (it's instead used to determine the request endpoint). + * - The random SearchRequest, since this component only affects the request + * parameters and also isn't captured in the request's xContent. 
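+     * Equality is therefore asserted only over the explain and profile flags, the script
+     * type, the script itself and the script params.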
+ */ + @Override + protected void assertEqualInstances(SearchTemplateRequest expectedInstance, SearchTemplateRequest newInstance) { + assertTrue( + expectedInstance.isExplain() == newInstance.isExplain() && + expectedInstance.isProfile() == newInstance.isProfile() && + expectedInstance.getScriptType() == newInstance.getScriptType() && + Objects.equals(expectedInstance.getScript(), newInstance.getScript()) && + Objects.equals(expectedInstance.getScriptParams(), newInstance.getScriptParams())); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testToXContentWithInlineTemplate() throws IOException { + SearchTemplateRequest request = new SearchTemplateRequest(); + + request.setScriptType(ScriptType.INLINE); + request.setScript("{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }"); + request.setProfile(true); + + Map scriptParams = new HashMap<>(); + scriptParams.put("my_field", "foo"); + scriptParams.put("my_value", "bar"); + request.setScriptParams(scriptParams); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + .startObject() + .field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }") + .startObject("params") + .field("my_field", "foo") + .field("my_value", "bar") + .endObject() + .field("explain", false) + .field("profile", true) + .endObject(); + + XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedRequest), + BytesReference.bytes(actualRequest), + contentType); + } + + public void testToXContentWithStoredTemplate() throws IOException { + SearchTemplateRequest request = new SearchTemplateRequest(); + + request.setScriptType(ScriptType.STORED); + request.setScript("match_template"); + request.setExplain(true); + + Map params = new HashMap<>(); + params.put("my_field", "foo"); + params.put("my_value", "bar"); + request.setScriptParams(params); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + .startObject() + .field("id", "match_template") + .startObject("params") + .field("my_field", "foo") + .field("my_value", "bar") + .endObject() + .field("explain", true) + .field("profile", false) + .endObject(); + + XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent( + BytesReference.bytes(expectedRequest), + BytesReference.bytes(actualRequest), + contentType); + } + + public void testFromXContentWithEmbeddedTemplate() throws Exception { + String source = "{" + + " 'source' : {\n" + + " 'query': {\n" + + " 'terms': {\n" + + " 'status': [\n" + + " '{{#status}}',\n" + + " '{{.}}',\n" + + " '{{/status}}'\n" + + " ]\n" + + " }\n" + + " }\n" + + " }" + + "}"; + + SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source)); + assertThat(request.getScript(), equalTo("{\"query\":{\"terms\":{\"status\":[\"{{#status}}\",\"{{.}}\",\"{{/status}}\"]}}}")); + assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); + assertThat(request.getScriptParams(), nullValue()); + } + + public void testFromXContentWithEmbeddedTemplateAndParams() throws Exception { + String source = "{" + + " 'source' : {" + + " 'query': { 'match' : { 
'{{my_field}}' : '{{my_value}}' } }," + + " 'size' : '{{my_size}}'" + + " }," + + " 'params' : {" + + " 'my_field' : 'foo'," + + " 'my_value' : 'bar'," + + " 'my_size' : 5" + + " }" + + "}"; + + SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source)); + assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}")); + assertThat(request.getScriptType(), equalTo(ScriptType.INLINE)); + assertThat(request.getScriptParams().size(), equalTo(3)); + assertThat(request.getScriptParams(), hasEntry("my_field", "foo")); + assertThat(request.getScriptParams(), hasEntry("my_value", "bar")); + assertThat(request.getScriptParams(), hasEntry("my_size", 5)); + } + + public void testFromXContentWithMalformedRequest() { + // Unclosed template id + expectThrows(XContentParseException.class, () -> SearchTemplateRequest.fromXContent(newParser("{'id' : 'another_temp }"))); + } + + /** + * Creates a {@link XContentParser} with the given String while replacing single quote to double quotes. + */ + private XContentParser newParser(String s) throws IOException { + assertNotNull(s); + return createParser(JsonXContent.jsonXContent, s.replace("'", "\"")); + } +} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java new file mode 100644 index 0000000000000..53f5d1d8f842e --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -0,0 +1,211 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.mustache; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.function.Predicate; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class SearchTemplateResponseTests extends AbstractXContentTestCase { + + @Override + protected SearchTemplateResponse createTestInstance() { + SearchTemplateResponse response = new SearchTemplateResponse(); + if (randomBoolean()) { + response.setResponse(createSearchResponse()); + } else { + response.setSource(createSource()); + } + return response; + } + + @Override + protected SearchTemplateResponse doParseInstance(XContentParser parser) throws IOException { + return SearchTemplateResponse.fromXContent(parser); + } + + /** + * For simplicity we create a minimal response, as there is already a dedicated + * test class for search response parsing and serialization. + */ + private static SearchResponse createSearchResponse() { + long tookInMillis = randomNonNegativeLong(); + int totalShards = randomIntBetween(1, Integer.MAX_VALUE); + int successfulShards = randomIntBetween(0, totalShards); + int skippedShards = randomIntBetween(0, totalShards); + InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); + + return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, + skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + } + + private static BytesReference createSource() { + try { + XContentBuilder source = XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("match") + .field(randomAlphaOfLength(5), randomAlphaOfLength(10)) + .endObject() + .endObject() + .endObject(); + return BytesReference.bytes(source); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + String templateOutputField = SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName(); + return field -> field.equals(templateOutputField) || field.startsWith(templateOutputField + "."); + } + + /** + * Note that we can't rely on normal equals and hashCode checks, since {@link SearchResponse} doesn't + * currently implement equals and hashCode. Instead, we compare the template outputs for equality, + * and perform some sanity checks on the search response instances. 
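+     * Concretely, sources are compared as equivalent XContent structures, and search
+     * responses via their total hit counts and max scores.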
+ */ + @Override + protected void assertEqualInstances(SearchTemplateResponse expectedInstance, SearchTemplateResponse newInstance) { + assertNotSame(newInstance, expectedInstance); + + BytesReference expectedSource = expectedInstance.getSource(); + BytesReference newSource = newInstance.getSource(); + assertEquals(expectedSource == null, newSource == null); + if (expectedSource != null) { + try { + assertToXContentEquivalent(expectedSource, newSource, XContentType.JSON); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + assertEquals(expectedInstance.hasResponse(), newInstance.hasResponse()); + if (expectedInstance.hasResponse()) { + SearchResponse expectedResponse = expectedInstance.getResponse(); + SearchResponse newResponse = newInstance.getResponse(); + + assertEquals(expectedResponse.getHits().totalHits, newResponse.getHits().totalHits); + assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001); + } + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + public void testSourceToXContent() throws IOException { + SearchTemplateResponse response = new SearchTemplateResponse(); + + XContentBuilder source = XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("terms") + .field("status", new String[]{"pending", "published"}) + .endObject() + .endObject() + .endObject(); + response.setSource(BytesReference.bytes(source)); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .startObject("template_output") + .startObject("query") + .startObject("terms") + .field("status", new String[]{"pending", "published"}) + .endObject() + .endObject() + .endObject() + .endObject(); + + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent( + BytesReference.bytes(expectedResponse), + BytesReference.bytes(actualResponse), + contentType); + } + + public void testSearchResponseToXContent() throws IOException { + SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap()); + hit.score(2.0f); + SearchHit[] hits = new SearchHit[] { hit }; + + InternalSearchResponse internalSearchResponse = new InternalSearchResponse( + new SearchHits(hits, 100, 1.5f), null, null, null, false, null, 1); + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, + 0, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + + SearchTemplateResponse response = new SearchTemplateResponse(); + response.setResponse(searchResponse); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .field("took", 0) + .field("timed_out", false) + .startObject("_shards") + .field("total", 0) + .field("successful", 0) + .field("skipped", 0) + .field("failed", 0) + .endObject() + .startObject("hits") + .field("total", 100) + .field("max_score", 1.5F) + .startArray("hits") + .startObject() + .field("_type", "type") + .field("_id", "id") + .field("_score", 2.0F) + .endObject() + .endArray() + .endObject() + .endObject(); + + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent( + 
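+            // expected and actual were built with the same randomly chosen contentType,
+            // so this structural comparison is apples-to-apples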
BytesReference.bytes(expectedResponse), + BytesReference.bytes(actualResponse), + contentType); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index fa851e9c6d802..d534af5789448 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -82,7 +82,7 @@ private RandomSearchRequestGenerator() {} * @param randomSearchSourceBuilder builds a random {@link SearchSourceBuilder}. You can use * {@link #randomSearchSourceBuilder(Supplier, Supplier, Supplier, Supplier, Supplier)}. */ - public static SearchRequest randomSearchRequest(Supplier randomSearchSourceBuilder) throws IOException { + public static SearchRequest randomSearchRequest(Supplier randomSearchSourceBuilder) { SearchRequest searchRequest = new SearchRequest(); searchRequest.allowPartialSearchResults(true); if (randomBoolean()) { From 2cb71d0947947c94ccd78741283df6c966aad1db Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 15 May 2018 15:03:08 -0600 Subject: [PATCH 36/74] Refactor IndicesOptions to not be byte-based (#30586) * Refactor IndicesOptions to not be byte-based This refactors IndicesOptions to be enum/enummap based rather than using a byte as a bitmap for each of the options. This is necessary because we'd like to add additional options, but we ran out of bits. Backwards compatibility is kept for earlier versions so the option serialization does not change the options. Relates sort of to #30188 --- .../action/support/IndicesOptions.java | 307 ++++++++++++------ .../common/io/stream/StreamInput.java | 18 + .../common/io/stream/StreamOutput.java | 11 + .../ClusterSearchShardsRequestTests.java | 2 +- .../action/support/IndicesOptionsTests.java | 80 +++++ 5 files changed, 323 insertions(+), 95 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 64c26d6b94aa5..afcb73e9b1cbd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -19,12 +19,17 @@ package org.elasticsearch.action.support; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestRequest; import java.io.IOException; +import java.util.Collection; +import java.util.EnumSet; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue; @@ -35,41 +40,155 @@ */ public class IndicesOptions { - private static final IndicesOptions[] VALUES; + public enum WildcardStates { + OPEN, + CLOSED; - private static final byte IGNORE_UNAVAILABLE = 1; - private static final byte ALLOW_NO_INDICES = 2; - private static final byte EXPAND_WILDCARDS_OPEN = 4; - private static final byte EXPAND_WILDCARDS_CLOSED = 8; - private static final byte FORBID_ALIASES_TO_MULTIPLE_INDICES = 16; - private static final byte FORBID_CLOSED_INDICES = 32; - private static final byte IGNORE_ALIASES = 64; + public static final EnumSet NONE = 
EnumSet.noneOf(WildcardStates.class); - private static final byte STRICT_EXPAND_OPEN = 6; - private static final byte LENIENT_EXPAND_OPEN = 7; - private static final byte STRICT_EXPAND_OPEN_CLOSED = 14; - private static final byte STRICT_EXPAND_OPEN_FORBID_CLOSED = 38; - private static final byte STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED = 48; + public static EnumSet<WildcardStates> parseParameter(Object value, EnumSet<WildcardStates> defaultStates) { + if (value == null) { + return defaultStates; + } - static { - short max = 1 << 7; - VALUES = new IndicesOptions[max]; - for (short id = 0; id < max; id++) { - VALUES[id] = new IndicesOptions((byte)id); + Set<WildcardStates> states = new HashSet<>(); + String[] wildcards = nodeStringArrayValue(value); + for (String wildcard : wildcards) { + if ("open".equals(wildcard)) { + states.add(OPEN); + } else if ("closed".equals(wildcard)) { + states.add(CLOSED); + } else if ("none".equals(wildcard)) { + states.clear(); + } else if ("all".equals(wildcard)) { + states.add(OPEN); + states.add(CLOSED); + } else { + throw new IllegalArgumentException("No valid expand wildcard value [" + wildcard + "]"); + } + } + + return states.isEmpty() ? NONE : EnumSet.copyOf(states); + } } - private final byte id; + public enum Option { + IGNORE_UNAVAILABLE, + IGNORE_ALIASES, + ALLOW_NO_INDICES, + FORBID_ALIASES_TO_MULTIPLE_INDICES, + FORBID_CLOSED_INDICES; + + public static final EnumSet