6 | 6 | package org.elasticsearch.xpack.ml; |
7 | 7 |
8 | 8 | import org.elasticsearch.ElasticsearchException; |
| 9 | +import org.elasticsearch.Version; |
9 | 10 | import org.elasticsearch.action.ActionListener; |
10 | 11 | import org.elasticsearch.action.support.PlainActionFuture; |
11 | 12 | import org.elasticsearch.client.Client; |
12 | 13 | import org.elasticsearch.cluster.ClusterState; |
13 | 14 | import org.elasticsearch.cluster.metadata.MetaData; |
| 15 | +import org.elasticsearch.cluster.node.DiscoveryNode; |
| 16 | +import org.elasticsearch.cluster.node.DiscoveryNodes; |
14 | 17 | import org.elasticsearch.cluster.service.ClusterService; |
15 | 18 | import org.elasticsearch.common.io.stream.BytesStreamOutput; |
| 19 | +import org.elasticsearch.common.io.stream.StreamInput; |
16 | 20 | import org.elasticsearch.common.settings.Settings; |
| 21 | +import org.elasticsearch.common.transport.TransportAddress; |
17 | 22 | import org.elasticsearch.common.xcontent.ToXContent; |
18 | 23 | import org.elasticsearch.common.xcontent.XContentBuilder; |
19 | 24 | import org.elasticsearch.common.xcontent.XContentFactory; |
46 | 51 | import java.util.Arrays; |
47 | 52 | import java.util.Collections; |
48 | 53 | import java.util.Date; |
| 54 | +import java.util.HashMap; |
| 55 | +import java.util.HashSet; |
49 | 56 | import java.util.List; |
| 57 | +import java.util.Map; |
| 58 | +import java.util.Set; |
50 | 59 |
51 | 60 | import static org.hamcrest.Matchers.equalTo; |
52 | 61 | import static org.hamcrest.Matchers.notNullValue; |
@@ -223,6 +232,49 @@ public void testUsage() throws Exception { |
223 | 232 | } |
224 | 233 | } |
225 | 234 |
| 235 | + public void testNodeCount() throws Exception { |
| 236 | + when(licenseState.isMachineLearningAllowed()).thenReturn(true); |
| 237 | + int nodeCount = randomIntBetween(1, 3); |
| 238 | + givenNodeCount(nodeCount); |
| 239 | + Settings.Builder settings = Settings.builder().put(commonSettings); |
| 240 | + settings.put("xpack.ml.enabled", true); |
| 241 | + MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), |
| 242 | + clusterService, client, licenseState); |
| 243 | + |
| 244 | + PlainActionFuture<Usage> future = new PlainActionFuture<>(); |
| 245 | + featureSet.usage(future); |
| 246 | + XPackFeatureSet.Usage usage = future.get(); |
| 247 | + |
| 248 | + assertThat(usage.available(), is(true)); |
| 249 | + assertThat(usage.enabled(), is(true)); |
| 250 | + |
| 251 | + BytesStreamOutput out = new BytesStreamOutput(); |
| 252 | + usage.writeTo(out); |
| 253 | + XPackFeatureSet.Usage serializedUsage = new MachineLearningFeatureSetUsage(out.bytes().streamInput()); |
| 254 | + |
| 255 | + XContentSource source; |
| 256 | + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { |
| 257 | + serializedUsage.toXContent(builder, ToXContent.EMPTY_PARAMS); |
| 258 | + source = new XContentSource(builder); |
| 259 | + } |
| 260 | + assertThat(source.getValue("node_count"), equalTo(nodeCount)); |
| 261 | + |
| 262 | + BytesStreamOutput oldOut = new BytesStreamOutput(); |
| 263 | + oldOut.setVersion(Version.V_6_0_0); |
| 264 | + usage.writeTo(oldOut); |
| 265 | + StreamInput oldInput = oldOut.bytes().streamInput(); |
| 266 | + oldInput.setVersion(Version.V_6_0_0); |
| 267 | + XPackFeatureSet.Usage oldSerializedUsage = new MachineLearningFeatureSetUsage(oldInput); |
| 268 | + |
| 269 | + XContentSource oldSource; |
| 270 | + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { |
| 271 | + oldSerializedUsage.toXContent(builder, ToXContent.EMPTY_PARAMS); |
| 272 | + oldSource = new XContentSource(builder); |
| 273 | + } |
| 274 | + |
| 275 | + assertNull(oldSource.getValue("node_count")); |
| 276 | + } |
| 277 | + |
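Note: the second round-trip in testNodeCount serializes the usage object to a 6.0.0 stream and asserts that node_count is absent, so the usage class has to gate the new field on the stream version. Below is only a minimal sketch of that version-gating pattern; the cutoff constant (Version.V_6_5_0 here), the surrounding fields, and the -1 sentinel are assumptions for illustration, not details taken from this change.

    // Hypothetical sketch of version-gated serialization in MachineLearningFeatureSetUsage.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        // ... existing usage fields ...
        if (out.getVersion().onOrAfter(Version.V_6_5_0)) { // assumed cutoff version
            out.writeInt(nodeCount);
        }
    }

    public MachineLearningFeatureSetUsage(StreamInput in) throws IOException {
        super(in);
        // ... existing usage fields ...
        if (in.getVersion().onOrAfter(Version.V_6_5_0)) { // assumed cutoff version
            nodeCount = in.readInt();
        } else {
            nodeCount = -1; // unknown on old streams; omitted from toXContent, matching the assertion above
        }
    }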
226 | 278 | public void testUsageGivenMlMetadataNotInstalled() throws Exception { |
227 | 279 | when(licenseState.isMachineLearningAllowed()).thenReturn(true); |
228 | 280 | Settings.Builder settings = Settings.builder().put(commonSettings); |
@@ -286,6 +338,37 @@ private void givenJobs(List<Job> jobs, List<GetJobsStatsAction.Response.JobStats |
286 | 338 | }).when(client).execute(same(GetJobsStatsAction.INSTANCE), any(), any()); |
287 | 339 | } |
288 | 340 |
| 341 | + private void givenNodeCount(int nodeCount) { |
| 342 | + DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); |
| 343 | + for (int i = 0; i < nodeCount; i++) { |
| 344 | + Map<String, String> attrs = new HashMap<>(); |
| 345 | + attrs.put(MachineLearning.ML_ENABLED_NODE_ATTR, Boolean.toString(true)); |
| 346 | + Set<DiscoveryNode.Role> roles = new HashSet<>(); |
| 347 | + roles.add(DiscoveryNode.Role.DATA); |
| 348 | + roles.add(DiscoveryNode.Role.MASTER); |
| 349 | + roles.add(DiscoveryNode.Role.INGEST); |
| 350 | + nodesBuilder.add(new DiscoveryNode(randomAlphaOfLength(i + 1), |
| 351 | + new TransportAddress(TransportAddress.META_ADDRESS, 9100 + i), |
| 352 | + attrs, |
| 353 | + roles, |
| 354 | + Version.CURRENT)); |
| 355 | + } |
| 356 | + for (int i = 0, nonMlNodeCount = randomIntBetween(1, 3); i < nonMlNodeCount; i++) { |
| 357 | + Map<String, String> attrs = new HashMap<>(); |
| 358 | + Set<DiscoveryNode.Role> roles = new HashSet<>(); |
| 359 | + roles.add(DiscoveryNode.Role.DATA); |
| 360 | + roles.add(DiscoveryNode.Role.MASTER); |
| 361 | + roles.add(DiscoveryNode.Role.INGEST); |
| 362 | + nodesBuilder.add(new DiscoveryNode(randomAlphaOfLength(nodeCount + i + 1), // length differs from the ML node ids, so ids cannot collide |
| 363 | + new TransportAddress(TransportAddress.META_ADDRESS, 9300 + i), |
| 364 | + attrs, |
| 365 | + roles, |
| 366 | + Version.CURRENT)); |
| 367 | + } |
| 368 | + ClusterState clusterState = new ClusterState.Builder(ClusterState.EMPTY_STATE).nodes(nodesBuilder.build()).build(); |
| 369 | + when(clusterService.state()).thenReturn(clusterState); |
| 370 | + } |
| 371 | + |
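For context, the helper above stubs a cluster state containing nodeCount nodes that carry the ml.enabled attribute plus a few nodes that do not, and testNodeCount expects node_count to equal only the former. A count like that could be derived by filtering on the same attribute, roughly as sketched below; the method name and placement are made up and this is not the actual MachineLearningFeatureSet implementation.

    // Hypothetical: count cluster nodes whose ml.enabled attribute is set to true.
    static int countMlEnabledNodes(ClusterState clusterState) {
        int count = 0;
        for (DiscoveryNode node : clusterState.getNodes()) {
            String attr = node.getAttributes().get(MachineLearning.ML_ENABLED_NODE_ATTR);
            if (Boolean.parseBoolean(attr)) { // parseBoolean(null) is false, so plain nodes are skipped
                count++;
            }
        }
        return count;
    }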
289 | 372 | private void givenDatafeeds(List<GetDatafeedsStatsAction.Response.DatafeedStats> datafeedStats) { |
290 | 373 | doAnswer(invocationOnMock -> { |
291 | 374 | ActionListener<GetDatafeedsStatsAction.Response> listener = |