diff --git a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java index 4fd2512f2cbbe..981033aeccd8c 100644 --- a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java +++ b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java @@ -91,7 +91,7 @@ static Thread createShutdownHook(Terminal terminal, Closeable closeable) { try { closeable.close(); } catch (final IOException e) { - e.printStackTrace(terminal.getErrorWriter()); + terminal.errorPrintln(e); } terminal.flush(); // make sure to flush whatever the close or error might have written }, "elasticsearch-cli-shutdown"); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java index a6eb32cb1bb38..94c7653a08e0e 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java @@ -9,12 +9,14 @@ package org.elasticsearch.server.cli; import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.Terminal.Verbosity; import java.io.BufferedReader; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -29,9 +31,9 @@ * {@link BootstrapInfo#SERVER_READY_MARKER} signals the server is ready and the cli may * detach if daemonizing. All other messages are passed through to stderr. 
*/ -class ErrorPumpThread extends Thread { +class ErrorPumpThread extends Thread implements Closeable { private final BufferedReader reader; - private final PrintWriter writer; + private final Terminal terminal; // a latch which changes state when the server is ready or has had a bootstrap error private final CountDownLatch readyOrDead = new CountDownLatch(1); @@ -42,10 +44,24 @@ class ErrorPumpThread extends Thread { // an unexpected io failure that occurred while pumping stderr private volatile IOException ioFailure; - ErrorPumpThread(PrintWriter errOutput, InputStream errInput) { + ErrorPumpThread(Terminal terminal, InputStream errInput) { super("server-cli[stderr_pump]"); this.reader = new BufferedReader(new InputStreamReader(errInput, StandardCharsets.UTF_8)); - this.writer = errOutput; + this.terminal = terminal; + } + + private void checkForIoFailure() throws IOException { + IOException failure = ioFailure; + ioFailure = null; + if (failure != null) { + throw failure; + } + } + + @Override + public void close() throws IOException { + assert isAlive() == false : "Pump thread must be drained first"; + checkForIoFailure(); } /** @@ -56,9 +72,7 @@ class ErrorPumpThread extends Thread { */ boolean waitUntilReady() throws IOException { nonInterruptibleVoid(readyOrDead::await); - if (ioFailure != null) { - throw ioFailure; - } + checkForIoFailure(); return ready; } @@ -81,13 +95,13 @@ public void run() { ready = true; readyOrDead.countDown(); } else if (filter.contains(line) == false) { - writer.println(line); + terminal.errorPrintln(Verbosity.SILENT, line, false); } } } catch (IOException e) { ioFailure = e; } finally { - writer.flush(); + terminal.flush(); readyOrDead.countDown(); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java index bf03acaf7a5da..0fddf76caff59 100644 --- 
a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java @@ -23,7 +23,7 @@ class KeystorePasswordTerminal extends Terminal implements Closeable { private final SecureString password; KeystorePasswordTerminal(Terminal delegate, SecureString password) { - super(delegate.getReader(), delegate.getWriter(), delegate.getErrorWriter()); + super(delegate); this.delegate = delegate; this.password = password; setVerbosity(delegate.getVerbosity()); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 0505ab86127cf..7b904d4cb5a89 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -231,7 +232,7 @@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index fa948572e7675..35b5d93b39933 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -61,16 +61,21 @@ public long pid() { */ public synchronized void detach() throws IOException { 
errorPump.drain(); - IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream()); - detached = true; + try { + IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream(), errorPump); + } finally { + detached = true; + } } /** * Waits for the subprocess to exit. */ - public int waitFor() { + public int waitFor() throws IOException { errorPump.drain(); - return nonInterruptible(jvmProcess::waitFor); + int exitCode = nonInterruptible(jvmProcess::waitFor); + errorPump.close(); + return exitCode; } /** @@ -81,7 +86,7 @@ public int waitFor() { * *

Note that if {@link #detach()} has been called, this method is a no-op. */ - public synchronized void stop() { + public synchronized void stop() throws IOException { if (detached) { return; } @@ -93,7 +98,7 @@ public synchronized void stop() { /** * Stop the subprocess, sending a SIGKILL. */ - public void forceStop() { + public void forceStop() throws IOException { assert detached == false; jvmProcess.destroyForcibly(); waitFor(); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java index b90ac25f5d57d..fcc290ebe9e72 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java @@ -154,7 +154,7 @@ ServerProcess start(ProcessStarter processStarter) throws UserException { boolean success = false; try { jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), processStarter); - errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); + errorPump = new ErrorPumpThread(terminal, jvmProcess.getErrorStream()); errorPump.start(); sendArgs(serverArgs, jvmProcess.getOutputStream()); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index e469764590bd6..38a64a778fc27 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; import java.util.Locale; import java.util.Optional; import 
java.util.concurrent.atomic.AtomicBoolean; @@ -43,8 +44,11 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.emptyString; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.not; public class ServerCliTests extends CommandTestCase { @@ -321,11 +325,16 @@ protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, throw new InterruptedException("interrupted while get jvm options"); } }; - var e = expectThrows( - InterruptedException.class, - () -> command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)) - ); - assertThat(e.getMessage(), equalTo("interrupted while get jvm options")); + + int exitCode = command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)); + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + + String[] lines = terminal.getErrorOutput().split(System.lineSeparator()); + assertThat(List.of(lines), hasSize(greaterThan(10))); // at least decent sized stacktrace + assertThat(lines[0], is("java.lang.InterruptedException: interrupted while get jvm options")); + assertThat(lines[1], matchesRegex("\\tat org.elasticsearch.server.cli.ServerCliTests.+startServer\\(ServerCliTests.java:\\d+\\)")); + assertThat(lines[lines.length - 1], matchesRegex("\tat java.base/java.lang.Thread.run\\(Thread.java:\\d+\\)")); + command.close(); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index b9f2eb73b30b5..dc36485fb77ab 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ 
b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -393,15 +394,24 @@ public void testWaitFor() throws Exception { stderr.println("final message"); }; var server = startProcess(false, false); + + CompletableFuture stopping = new CompletableFuture<>(); new Thread(() -> { - // simulate stop run as shutdown hook in another thread, eg from Ctrl-C - nonInterruptibleVoid(mainReady::await); - server.stop(); + try { + // simulate stop run as shutdown hook in another thread, eg from Ctrl-C + nonInterruptibleVoid(mainReady::await); + server.stop(); + stopping.complete(null); + } catch (Throwable e) { + stopping.completeExceptionally(e); + } }).start(); int exitCode = server.waitFor(); assertThat(process.main.isDone(), is(true)); assertThat(exitCode, equalTo(0)); assertThat(terminal.getErrorOutput(), containsString("final message")); + // rethrow any potential exception observed while stopping + stopping.get(); } public void testProcessDies() throws Exception { diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 22474e63ab0df..66ae78470c55d 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -23,6 +23,8 @@ import org.elasticsearch.server.cli.ServerProcessBuilder; import org.elasticsearch.server.cli.ServerProcessUtils; +import java.io.IOException; + 
/** * Starts an Elasticsearch process, but does not wait for it to exit. *

@@ -55,7 +57,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java index e4b651fcb77af..8f44eaa80f23a 100644 --- a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java +++ b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java @@ -22,6 +22,8 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; public class ProcrunCommandTests extends WindowsServiceCliTestCase { @@ -111,8 +113,10 @@ protected String getDefaultFailureMessage() { public void testMissingExe() throws Exception { Files.delete(serviceExe); - var e = expectThrows(IllegalStateException.class, () -> executeMain("install")); - assertThat(e.getMessage(), containsString("Missing procrun exe")); + int exitCode = executeMain("install"); + + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + assertThat(terminal.getErrorOutput(), startsWith("java.lang.IllegalStateException: Missing procrun exe")); } public void testServiceId() throws Exception { diff --git a/docs/changelog/106820.yaml b/docs/changelog/106820.yaml new file mode 100644 index 0000000000000..d854e3984c13d --- /dev/null +++ b/docs/changelog/106820.yaml @@ -0,0 +1,5 @@ +pr: 106820 +summary: Add a capabilities API to check node and cluster capabilities +area: Infra/REST API +type: feature +issues: [] diff --git a/docs/changelog/107088.yaml b/docs/changelog/107088.yaml new file 
mode 100644 index 0000000000000..01a926f185eea --- /dev/null +++ b/docs/changelog/107088.yaml @@ -0,0 +1,5 @@ +pr: 107088 +summary: Introduce role description field +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml deleted file mode 100644 index a328bc2a2a208..0000000000000 --- a/docs/changelog/107886.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107886 -summary: Cluster state role mapper file settings service -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/107891.yaml b/docs/changelog/107891.yaml new file mode 100644 index 0000000000000..deb3fbd2258ff --- /dev/null +++ b/docs/changelog/107891.yaml @@ -0,0 +1,6 @@ +pr: 107891 +summary: Fix `startOffset` must be non-negative error in XLMRoBERTa tokenizer +area: Machine Learning +type: bug +issues: + - 104626 diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml new file mode 100644 index 0000000000000..607979c2eb0ac --- /dev/null +++ b/docs/changelog/108238.yaml @@ -0,0 +1,6 @@ +pr: 108238 +summary: "Nativeaccess: try to load all located libsystemds" +area: Infra/Core +type: bug +issues: + - 107878 diff --git a/docs/changelog/108300.yaml b/docs/changelog/108300.yaml new file mode 100644 index 0000000000000..c4d6e468113a4 --- /dev/null +++ b/docs/changelog/108300.yaml @@ -0,0 +1,5 @@ +pr: 108300 +summary: "ESQL: Add more time span units" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/108340.yaml b/docs/changelog/108340.yaml new file mode 100644 index 0000000000000..fb2ea72c0a0f5 --- /dev/null +++ b/docs/changelog/108340.yaml @@ -0,0 +1,5 @@ +pr: 108340 +summary: "Apm-data: increase version for templates" +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/108349.yaml b/docs/changelog/108349.yaml new file mode 100644 index 0000000000000..6d9ea3d658dca --- /dev/null +++ b/docs/changelog/108349.yaml @@ -0,0 +1,6 @@ +pr: 108349 +summary: "Ecs@mappings: reduce 
scope for `ecs_geo_point`" +area: Data streams +type: bug +issues: + - 108338 diff --git a/docs/changelog/108365.yaml b/docs/changelog/108365.yaml new file mode 100644 index 0000000000000..d94486e2f3ea7 --- /dev/null +++ b/docs/changelog/108365.yaml @@ -0,0 +1,5 @@ +pr: 108365 +summary: "[Bugfix] Connector API - fix status serialisation issue in termquery" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/108379.yaml b/docs/changelog/108379.yaml new file mode 100644 index 0000000000000..312856a5db33d --- /dev/null +++ b/docs/changelog/108379.yaml @@ -0,0 +1,5 @@ +pr: 108379 +summary: Create a new `NodeRequest` for every `NodesDataTiersUsageTransport` use +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/108396.yaml b/docs/changelog/108396.yaml new file mode 100644 index 0000000000000..63937646b755c --- /dev/null +++ b/docs/changelog/108396.yaml @@ -0,0 +1,6 @@ +pr: 108396 +summary: "Apm-data: improve default pipeline performance" +area: Data streams +type: enhancement +issues: + - 108290 diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml new file mode 100644 index 0000000000000..84607b1b99ac3 --- /dev/null +++ b/docs/changelog/108431.yaml @@ -0,0 +1,5 @@ +pr: 108431 +summary: "ESQL: Disable quoting in FROM command" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/108444.yaml b/docs/changelog/108444.yaml new file mode 100644 index 0000000000000..c946ab24f939a --- /dev/null +++ b/docs/changelog/108444.yaml @@ -0,0 +1,5 @@ +pr: 108444 +summary: "Apm-data: ignore malformed fields, and too many dynamic fields" +area: Data streams +type: enhancement +issues: [] diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index b8fb92b1ea15d..59305c6305737 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,70 +10,7 @@ ### ActionListener -Callbacks are used extensively throughout 
Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which -doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. -They support several useful control flows: - -- They can be completed immediately on the calling thread. -- They can be completed concurrently on a different thread. -- They can be stored in a data structure and completed later on when the system reaches a particular state. -- Most commonly, they can be passed on to other methods that themselves require a callback. -- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run - before or after completion, before passing them on. - -`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is -used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose -parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes -it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is -certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances -themselves, creating new instances out of existing ones and completing them in interesting ways. 
See for instance: - -- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself -- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere -- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel -- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows - -Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code -without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to -waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our -code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The -entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at -[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), -and transport APIs all start at -[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) -and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via -callbacks. 
- -`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the -sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function -which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual -style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two -ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an -`onResponse()` and an `onFailure()` method. - -CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also -enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, -perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before -proceeding (e.g. -[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) -amongst many others). Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the -programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all -the callbacks ourselves. - -Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that -asynchronous methods must have `void` return type and may not throw any exceptions. 
This is mostly the case in our code as written today, -and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In -particular, it's not uncommon to permit some methods to throw an exception, using things like -[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) -(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an -`ActionListener` parameter, but still return a value separately for other local synchronous work. - -This pattern is often used in the transport action layer with the use of the -[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) -class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a -call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring -caller timeouts. +See the [Javadocs for `ActionListener`](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/action/ActionListener.java) (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) @@ -133,6 +70,14 @@ are only used for internode operations/communications. ### Work Queues +### RestClient + +The `RestClient` is primarily used in testing, to send requests against cluster nodes in the same format as would users. There +are some uses of `RestClient`, via `RestClientBuilder`, in the production code. 
For example, remote reindex leverages the +`RestClient` internally as the REST client to the remote elasticsearch cluster, and to take advantage of the compatibility of +`RestClient` requests with much older elasticsearch versions. The `RestClient` is also used externally by the `Java API Client` +to communicate with Elasticsearch. + # Cluster Coordination (Sketch of important classes? Might inform more sections to add for details.) diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index e5c2db65778d8..9d784f530d63c 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -358,6 +358,8 @@ POST _aliases ---- // TEST[s/^/PUT my-index-2099.05.06-000001\n/] +NOTE: Filters are only applied when using the <>, and are not applied when <>. + [discrete] [[alias-routing]] === Routing diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 708127718fe38..1f07361b89aac 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -39,7 +39,7 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> * <> -* <> +* experimental:[] <> * <> * <> * <> diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 663b2f8ecd249..b7928898a3bbb 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -1,12 +1,9 @@ [[esql-getting-started]] == Getting started with {esql} queries - ++++ Getting started ++++ -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - This guide shows how you can use {esql} to query and aggregate your data. 
[TIP] diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index c5d56ef15fdfd..c7f741d064310 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -160,14 +160,15 @@ Datetime intervals and timespans can be expressed using timespan literals. Timespan literals are a combination of a number and a qualifier. These qualifiers are supported: -* `millisecond`/`milliseconds` -* `second`/`seconds` -* `minute`/`minutes` -* `hour`/`hours` -* `day`/`days` -* `week`/`weeks` -* `month`/`months` -* `year`/`years` +* `millisecond`/`milliseconds`/`ms` +* `second`/`seconds`/`sec`/`s` +* `minute`/`minutes`/`min` +* `hour`/`hours`/`h` +* `day`/`days`/`d` +* `week`/`weeks`/`w` +* `month`/`months`/`mo` +* `quarter`/`quarters`/`q` +* `year`/`years`/`yr`/`y` Timespan literals are not whitespace sensitive. These expressions are all valid: diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc index 3dd19b5885902..dbc03d59a2bf7 100644 --- a/docs/reference/esql/functions/description/date_diff.asciidoc +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -25,3 +25,9 @@ s|abbreviations | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns |=== + +Note that while there is an overlap between the function's supported units and +{esql}'s supported time span literals, these sets are distinct and not +interchangeable. Similarly, the supported abbreviations are conveniently shared +with implementations of this function in other established products and not +necessarily common with the date-time nomenclature used by {es}. 
diff --git a/docs/reference/esql/index.asciidoc b/docs/reference/esql/index.asciidoc index 5cb02064dc794..54627a6de3c62 100644 --- a/docs/reference/esql/index.asciidoc +++ b/docs/reference/esql/index.asciidoc @@ -6,8 +6,6 @@ [partintro] -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - The {es} Query Language ({esql}) provides a powerful way to filter, transform, and analyze data stored in {es}, and in the future in other runtimes. It is designed to be easy to learn and use, by end users, SRE teams, application diff --git a/docs/reference/esql/processing-commands/mv_expand.asciidoc b/docs/reference/esql/processing-commands/mv_expand.asciidoc index 46dc4fd0a33cf..9e1cb5573c381 100644 --- a/docs/reference/esql/processing-commands/mv_expand.asciidoc +++ b/docs/reference/esql/processing-commands/mv_expand.asciidoc @@ -2,6 +2,8 @@ [[esql-mv_expand]] === `MV_EXPAND` +preview::[] + **Syntax** [source,esql] diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 3f8e19b47d37a..6c17a494f36ae 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -7,14 +7,14 @@ nodes to take over their responsibilities, an {es} cluster can continue operating normally if some of its nodes are unavailable or disconnected. There is a limit to how small a resilient cluster can be. All {es} clusters -require: +require the following components to function: -- One <> node -- At least one node for each <>. -- At least one copy of every <>. +- One <> +- At least one node for each <> +- At least one copy of every <> A resilient cluster requires redundancy for every required cluster component. 
-This means a resilient cluster must have: +This means a resilient cluster must have the following components: - At least three master-eligible nodes - At least two nodes of each role @@ -375,11 +375,11 @@ The cluster will be resilient to the loss of any zone as long as: - There are at least two zones containing data nodes. - Every index that is not a <> has at least one replica of each shard, in addition to the primary. -- Shard allocation awareness is configured to avoid concentrating all copies of - a shard within a single zone. +- <> is configured to + avoid concentrating all copies of a shard within a single zone. - The cluster has at least three master-eligible nodes. At least two of these - nodes are not voting-only master-eligible nodes, and they are spread evenly - across at least three zones. + nodes are not <>, + and they are spread evenly across at least three zones. - Clients are configured to send their requests to nodes in more than one zone or are configured to use a load balancer that balances the requests across an appropriate set of nodes. 
The {ess-trial}[Elastic Cloud] service provides such diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png new file mode 100644 index 0000000000000..d5a3040cc5343 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png differ diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png new file mode 100644 index 0000000000000..ce2ce6b2a95e9 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png differ diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e47304f1e1337..2057519719177 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -10,7 +10,7 @@ include::intro.asciidoc[] include::release-notes/highlights.asciidoc[] -include::getting-started.asciidoc[] +include::quickstart/index.asciidoc[] include::setup.asciidoc[] diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 2e678b929d296..89eb6e8559056 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -27,12 +27,7 @@ TIP: If you have created rules for specific {anomaly-jobs} and you want to monitor whether these jobs work as expected, {anomaly-jobs} health rules are ideal for this purpose. -In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules: - -[role="screenshot"] -image::images/ml-rule.png["Creating a new machine learning rule",500] -// NOTE: This is an autogenerated screenshot. Do not edit it directly. - +In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules. 
In the *{ml-app}* app, you can create only {anomaly-detect} alert rules; create them from the {anomaly-job} wizard after you start the job or from the {anomaly-job} list. diff --git a/docs/reference/ml/images/ml-rule.png b/docs/reference/ml/images/ml-rule.png deleted file mode 100644 index f7ebcb3716b81..0000000000000 Binary files a/docs/reference/ml/images/ml-rule.png and /dev/null differ diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index d447026fae293..9c6197f9ba40d 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -5,7 +5,7 @@ You can use custom node attributes as _awareness attributes_ to enable {es} to take your physical hardware configuration into account when allocating shards. If {es} knows which nodes are on the same physical server, in the same rack, or in the same zone, it can distribute the primary shard and its replica shards to -minimise the risk of losing all shard copies in the event of a failure. +minimize the risk of losing all shard copies in the event of a failure. When shard allocation awareness is enabled with the <> @@ -19,22 +19,27 @@ allocated in each location. If the number of nodes in each location is unbalanced and there are a lot of replicas, replica shards might be left unassigned. +TIP: Learn more about <>. + [[enabling-awareness]] ===== Enabling shard allocation awareness To enable shard allocation awareness: -. Specify the location of each node with a custom node attribute. For example, -if you want Elasticsearch to distribute shards across different racks, you might -set an awareness attribute called `rack_id` in each node's `elasticsearch.yml` -config file. +. Specify the location of each node with a custom node attribute. 
For example, +if you want Elasticsearch to distribute shards across different racks, you might +use an awareness attribute called `rack_id`. ++ +You can set custom attributes in two ways: + +- By editing the `elasticsearch.yml` config file: + [source,yaml] -------------------------------------------------------- node.attr.rack_id: rack_one -------------------------------------------------------- + -You can also set custom attributes when you start a node: +- Using the `-E` command line argument when you start a node: + [source,sh] -------------------------------------------------------- @@ -56,17 +61,33 @@ cluster.routing.allocation.awareness.attributes: rack_id <1> + You can also use the <> API to set or update -a cluster's awareness attributes. +a cluster's awareness attributes: ++ +[source,console] +-------------------------------------------------- +PUT /_cluster/settings +{ + "persistent" : { + "cluster.routing.allocation.awareness.attributes" : "rack_id" + } +} +-------------------------------------------------- With this example configuration, if you start two nodes with `node.attr.rack_id` set to `rack_one` and create an index with 5 primary shards and 1 replica of each primary, all primaries and replicas are -allocated across the two nodes. +allocated across the two nodes. + +.All primaries and replicas allocated across two nodes in the same rack +image::images/shard-allocation/shard-allocation-awareness-one-rack.png[All primaries and replicas are allocated across two nodes in the same rack] If you add two nodes with `node.attr.rack_id` set to `rack_two`, {es} moves shards to the new nodes, ensuring (if possible) that no two copies of the same shard are in the same rack.
+.Primaries and replicas allocated across four nodes in two racks, with no two copies of the same shard in the same rack +image::images/shard-allocation/shard-allocation-awareness-two-racks.png[Primaries and replicas are allocated across four nodes in two racks with no two copies of the same shard in the same rack] + If `rack_two` fails and takes down both its nodes, by default {es} allocates the lost shard copies to nodes in `rack_one`. To prevent multiple copies of a particular shard from being allocated in the same location, you can diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc similarity index 98% rename from docs/reference/getting-started.asciidoc rename to docs/reference/quickstart/getting-started.asciidoc index 2a5dbc2f0d031..6b3095e07f9d4 100644 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -1,10 +1,9 @@ -[chapter] [[getting-started]] -= Quick start +== Quick start guide This guide helps you learn how to: -* install and run {es} and {kib} (using {ecloud} or Docker), +* Run {es} and {kib} (using {ecloud} or in a local Docker dev environment), * add simple (non-timestamped) dataset to {es}, * run basic searches. diff --git a/docs/reference/quickstart/index.asciidoc b/docs/reference/quickstart/index.asciidoc new file mode 100644 index 0000000000000..e517d039e620b --- /dev/null +++ b/docs/reference/quickstart/index.asciidoc @@ -0,0 +1,10 @@ +[[quickstart]] += Quickstart + +Get started quickly with {es}. + +* Learn how to run {es} (and {kib}) for <>. +* Follow our <> to add data to {es} and query it. 
+ +include::run-elasticsearch-locally.asciidoc[] +include::getting-started.asciidoc[] diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc new file mode 100644 index 0000000000000..cfad434b890db --- /dev/null +++ b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc @@ -0,0 +1,177 @@ +[[run-elasticsearch-locally]] +== Run {es} locally in Docker (without security) +++++ +Local dev setup (Docker) +++++ + +[WARNING] +==== +*DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS* + +The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. +While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment. + +Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a production environment, including using Docker. +==== + +The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. +Note that if you don't need the {kib} UI, you can skip those instructions. + +[discrete] +[[local-dev-why]] +=== When would I use this setup? + +Use this setup if you want to quickly spin up {es} (and {kib}) for local development or testing. + +For example you might: + +* Want to run a quick test to see how a feature works. +* Follow a tutorial or guide that requires an {es} cluster, like our <>. +* Experiment with the {es} APIs using different tools, like the Dev Tools Console, cURL, or an Elastic programming language client. +* Quickly spin up an {es} cluster to test an executable https://github.com/elastic/elasticsearch-labs/tree/main/notebooks#readme[Python notebook] locally. 
+ +[discrete] +[[local-dev-prerequisites]] +=== Prerequisites + +If you don't have Docker installed, https://www.docker.com/products/docker-desktop[download and install Docker Desktop] for your operating system. + +[discrete] +[[local-dev-env-vars]] +=== Set environment variables + +Configure the following environment variables. + +[source,sh] +---- +export ELASTIC_PASSWORD="" # password for "elastic" username +export KIBANA_PASSWORD="" # Used _internally_ by Kibana, must be at least 6 characters long +---- + +[discrete] +[[local-dev-create-docker-network]] +=== Create a Docker network + +To run both {es} and {kib}, you'll need to create a Docker network: + +[source,sh] +---- +docker network create elastic-net +---- + +[discrete] +[[local-dev-run-es]] +=== Run {es} + +Start the {es} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. +endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:9200:9200 -d --name elasticsearch --network elastic-net \ + -e ELASTIC_PASSWORD=$ELASTIC_PASSWORD \ + -e "discovery.type=single-node" \ + -e "xpack.security.http.ssl.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {docker-image} +---- + +[discrete] +[[local-dev-run-kib]] +=== Run {kib} (optional) + +To run {kib}, you must first set the `kibana_system` password in the {es} container. + +[source,sh,subs="attributes"] +---- +# configure the Kibana password in the ES container +curl -u elastic:$ELASTIC_PASSWORD \ + -X POST \ + http://localhost:9200/_security/user/kibana_system/_password \ + -d '{"password":"'"$KIBANA_PASSWORD"'"}' \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +Start the {kib} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. 
+endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:5601:5601 -d --name kibana --network elastic-net \ + -e ELASTICSEARCH_URL=http://elasticsearch:9200 \ + -e ELASTICSEARCH_HOSTS=http://elasticsearch:9200 \ + -e ELASTICSEARCH_USERNAME=kibana_system \ + -e ELASTICSEARCH_PASSWORD=$KIBANA_PASSWORD \ + -e "xpack.security.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {kib-docker-image} +---- + +[NOTE] +==== +The service is started with a trial license. The trial license enables all features of Elasticsearch for a trial period of 30 days. After the trial period expires, the license is downgraded to a basic license, which is free forever. If you prefer to skip the trial and use the basic license, set the value of the `xpack.license.self_generated.type` variable to basic instead. For a detailed feature comparison between the different licenses, refer to our https://www.elastic.co/subscriptions[subscriptions page]. +==== + +[discrete] +[[local-dev-connecting-clients]] +== Connecting to {es} with language clients + +To connect to the {es} cluster from a language client, you can use basic authentication with the `elastic` username and the password you set in the environment variable. 
+ +You'll use the following connection details: + +* **{es} endpoint**: `http://localhost:9200` +* **Username**: `elastic` +* **Password**: `$ELASTIC_PASSWORD` (Value you set in the environment variable) + +For example, to connect with the Python `elasticsearch` client: + +[source,python] +---- +import os +from elasticsearch import Elasticsearch + +username = 'elastic' +password = os.getenv('ELASTIC_PASSWORD') # Value you set in the environment variable + +client = Elasticsearch( + "http://localhost:9200", + basic_auth=(username, password) +) + +print(client.info()) +---- + +Here's an example curl command using basic authentication: + +[source,sh,subs="attributes"] +---- +curl -u elastic:$ELASTIC_PASSWORD \ + -X PUT \ + http://localhost:9200/my-new-index \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +[discrete] +[[local-dev-next-steps]] +=== Next steps + +Use our <> to learn the basics of {es}: how to add data and query it. + +[discrete] +[[local-dev-production]] +=== Moving to production + +This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). + +Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. \ No newline at end of file diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index dd264c0e5bcd2..a2a397c4efe65 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1062,8 +1062,8 @@ end::stats[] tag::stored_fields[] `stored_fields`:: -(Optional, Boolean) If `true`, retrieves the document fields stored in the -index rather than the document `_source`. Defaults to `false`. 
+(Optional, string) +A comma-separated list of <> to include in the response. end::stored_fields[] tag::sync[] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 6bdfaab17a4d0..0d21f648ab58b 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -308,7 +308,8 @@ GET /_xpack/usage }, "rollup" : { "available" : true, - "enabled" : true + "enabled" : true, + ... }, "ilm" : { "policy_count" : 3, @@ -496,6 +497,7 @@ GET /_xpack/usage } ------------------------------------------------------------ // TESTRESPONSE[s/"security" : \{[^\}]*\},/"security" : $body.$_path,/] +// TESTRESPONSE[s/"rollup" : \{[^\}]*\},/"rollup" : $body.$_path,/] // TESTRESPONSE[s/"detectors" : \{[^\}]*\},/"detectors" : $body.$_path,/] // TESTRESPONSE[s/"model_size" : \{[^\}]*\},/"model_size" : $body.$_path,/] // TESTRESPONSE[s/"eql" : \{[^\}]*\},/"eql" : $body.$_path,/] diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc new file mode 100644 index 0000000000000..751cfebca8c78 --- /dev/null +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -0,0 +1,371 @@ +[[cohere-es]] +=== Tutorial: Using Cohere with {es} +++++ +Using Cohere with {es} +++++ + +The instructions in this tutorial show you how to compute embeddings with +Cohere using the {infer} API and store them for efficient vector or hybrid +search in {es}. This tutorial will use the Python {es} client to perform the +operations. + +You'll learn how to: + +* create an {infer} endpoint for text embedding using the Cohere service, +* create the necessary index mapping for the {es} index, +* build an {infer} pipeline to ingest documents into the index together with the +embeddings, +* perform hybrid search on the data, +* rerank search results by using Cohere's rerank model, +* design a RAG system with Cohere's Chat API.
+ +The tutorial uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set. + +Refer to https://docs.cohere.com/docs/elasticsearch-and-cohere[Cohere's tutorial] +for an example using a different data set. + + +[discrete] +[[cohere-es-req]] +==== Requirements + +* A https://cohere.com/[Cohere account], +* an https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[Elastic Cloud] +account, +* Python 3.7 or higher. + + +[discrete] +[[cohere-es-packages]] +==== Install required packages + +Install {es} and Cohere: + +[source,py] +------------------------------------------------------------ +!pip install elasticsearch +!pip install cohere +------------------------------------------------------------ + +Import the required packages: + +[source,py] +------------------------------------------------------------ +from elasticsearch import Elasticsearch, helpers +import cohere +import json +import requests +------------------------------------------------------------ + +[discrete] +[[cohere-es-client]] +==== Create the {es} client + +To create your {es} client, you need: +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. + +[source,py] +------------------------------------------------------------ +ELASTICSEARCH_ENDPOINT = "elastic_endpoint" +ELASTIC_API_KEY = "elastic_api_key" + +client = Elasticsearch( + cloud_id=ELASTICSEARCH_ENDPOINT, + api_key=ELASTIC_API_KEY +) + +# Confirm the client has connected +print(client.info()) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-endpoint]] +==== Create the {infer} endpoint + +<> first. In this example, the +{infer} endpoint uses Cohere's `embed-english-v3.0` model and the +`embedding_type` is set to `byte`.
+ +[source,py] +------------------------------------------------------------ +COHERE_API_KEY = "cohere_api_key" + +client.inference.put_model( + task_type="text_embedding", + inference_id="cohere_embeddings", + body={ + "service": "cohere", + "service_settings": { + "api_key": COHERE_API_KEY, + "model_id": "embed-english-v3.0", + "embedding_type": "byte" + } + }, +) +------------------------------------------------------------ + +You can find your API keys in your Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. + + +[discrete] +[[cohere-es-index-mapping]] +==== Create the index mapping + +Create the index mapping for the index that will contain the embeddings. + +[source,py] +------------------------------------------------------------ +client.indices.create( + index="cohere-embeddings", + settings={"index": {"default_pipeline": "cohere_embeddings"}}, + mappings={ + "properties": { + "text_embedding": { + "type": "dense_vector", + "dims": 1024, + "element_type": "byte", + }, + "text": {"type": "text"}, + "id": {"type": "integer"}, + "title": {"type": "text"} + } + }, +) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-pipeline]] +==== Create the {infer} pipeline + +Now you have an {infer} endpoint and an index ready to store embeddings. The +next step is to create an <> with an +<> that will create the embeddings using +the {infer} endpoint and stores them in the index. 
+ +[source,py] +-------------------------------------------------- +client.ingest.put_pipeline( + id="cohere_embeddings", + description="Ingest pipeline for Cohere inference.", + processors=[ + { + "inference": { + "model_id": "cohere_embeddings", + "input_output": { + "input_field": "text", + "output_field": "text_embedding", + }, + } + } + ], +) +-------------------------------------------------- + + +[discrete] +[[cohere-es-insert-documents]] +==== Prepare data and insert documents + +This example uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set that you can find on HuggingFace. + +[source,py] +-------------------------------------------------- +url = 'https://huggingface.co/datasets/mteb/scifact/raw/main/corpus.jsonl' + +# Fetch the JSONL data from the URL +response = requests.get(url) +response.raise_for_status() # Ensure noticing bad responses + +# Split the content by new lines and parse each line as JSON +data = [json.loads(line) for line in response.text.strip().split('\n') if line] +# Now data is a list of dictionaries + +# Change `_id` key to `id` as `_id` is a reserved key in Elasticsearch. +for item in data: + if '_id' in item: + item['id'] = item.pop('_id') + +# Prepare the documents to be indexed +documents = [] +for line in data: + data_dict = line + documents.append({ + "_index": "cohere-embeddings", + "_source": data_dict, + } + ) + +# Use the bulk endpoint to index +helpers.bulk(client, documents) + +print("Data ingestion completed, text embeddings generated!") +-------------------------------------------------- + +Your index is populated with the SciFact data and text embeddings for the text +field. + + +[discrete] +[[cohere-es-hybrid-search]] +==== Hybrid search + +Let's start querying the index! + +The code below performs a hybrid search. 
The `kNN` query computes the relevance +of search results based on vector similarity using the `text_embedding` field, +the lexical search query uses BM25 retrieval to compute keyword similarity on +the `title` and `text` fields. + +[source,py] +-------------------------------------------------- +query = "What is biosimilarity?" + +response = client.search( + index="cohere-embeddings", + size=100, + knn={ + "field": "text_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": query, + } + }, + "k": 10, + "num_candidates": 50, + }, + query={ + "multi_match": { + "query": query, + "fields": ["text", "title"] + } + } +) + +raw_documents = response["hits"]["hits"] + +# Display the first 10 results +for document in raw_documents[0:10]: + print(f'Title: {document["_source"]["title"]}\nText: {document["_source"]["text"]}\n') + +# Format the documents for ranking +documents = [] +for hit in response["hits"]["hits"]: + documents.append(hit["_source"]["text"]) +-------------------------------------------------- + + +[discrete] +[[cohere-es-rerank-results]] +===== Rerank search results + +To combine the results more effectively, use +https://docs.cohere.com/docs/rerank-2[Cohere's Rerank v3] model through the +{infer} API to provide a more precise semantic reranking of the results. + +Create an {infer} endpoint with your Cohere API key and the used model name as +the `model_id` (`rerank-english-v3.0` in this example). + +[source,py] +-------------------------------------------------- +client.inference.put_model( + task_type="rerank", + inference_id="cohere_rerank", + body={ + "service": "cohere", + "service_settings":{ + "api_key": COHERE_API_KEY, + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + }, + } +) +-------------------------------------------------- + +Rerank the results using the new {infer} endpoint. 
+ +[source,py] +-------------------------------------------------- +# Pass the query and the search results to the service +response = client.inference.inference( + inference_id="cohere_rerank", + body={ + "query": query, + "input": documents, + "task_settings": { + "return_documents": False + } + } +) + +# Reconstruct the input documents based on the index provided in the rerank response +ranked_documents = [] +for document in response.body["rerank"]: + ranked_documents.append({ + "title": raw_documents[int(document["index"])]["_source"]["title"], + "text": raw_documents[int(document["index"])]["_source"]["text"] + }) + +# Print the top 10 results +for document in ranked_documents[0:10]: + print(f"Title: {document['title']}\nText: {document['text']}\n") +-------------------------------------------------- + +The response is a list of documents in descending order of relevance. Each +document has a corresponding index that reflects the order of the documents when +they were sent to the {infer} endpoint. + + +[discrete] +[[cohere-es-rag]] +==== Retrieval Augmented Generation (RAG) with Cohere and {es} + +RAG is a method for generating text using additional information fetched from an +external data source. With the ranked results, you can build a RAG system on the +top of what you previously created by using +https://docs.cohere.com/docs/chat-api[Cohere's Chat API]. + +Pass in the retrieved documents and the query to receive a grounded response +using Cohere's newest generative model +https://docs.cohere.com/docs/command-r-plus[Command R+]. + +Then pass in the query and the documents to the Chat API, and print out the +response.
+ +[source,py] +-------------------------------------------------- +response = co.chat(message=query, documents=ranked_documents, model='command-r-plus') + +source_documents = [] +for citation in response.citations: + for document_id in citation.document_ids: + if document_id not in source_documents: + source_documents.append(document_id) + +print(f"Query: {query}") +print(f"Response: {response.text}") +print("Sources:") +for document in response.documents: + if document['id'] in source_documents: + print(f"{document['title']}: {document['text']}") + +-------------------------------------------------- + +The response will look similar to this: + +[source,console-result] +-------------------------------------------------- +Query: What is biosimilarity? +Response: Biosimilarity is based on the comparability concept, which has been used successfully for several decades to ensure close similarity of a biological product before and after a manufacturing change. Over the last 10 years, experience with biosimilars has shown that even complex biotechnology-derived proteins can be copied successfully. +Sources: +Interchangeability of Biosimilars: A European Perspective: (...) +-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc new file mode 100644 index 0000000000000..fdd984819558b --- /dev/null +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -0,0 +1,207 @@ +[[retrievers-overview]] +== Retrievers + +// Will move to a top level "Retrievers and reranking" section once reranking is live + +preview::[] + +A retriever is an abstraction that was added to the Search API in *8.14.0*. +This abstraction enables the configuration of multi-stage retrieval +pipelines within a single `_search` call.
This simplifies your search +application logic, because you no longer need to configure complex searches via +multiple {es} calls or implement additional client-side logic to +combine results from different queries. + +This document provides a general overview of the retriever abstraction. +For implementation details, including notable restrictions, check out the +<> in the `_search` API docs. + +[discrete] +[[retrievers-overview-types]] +=== Retriever types + +Retrievers come in various types, each tailored for different search operations. +The following retrievers are currently available: + +* <>. Returns top documents from a +traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. +Mimics a traditional query but in the context of a retriever framework. This +ensures backward compatibility as existing `_search` requests remain supported. +That way you can transition to the new abstraction at your own pace without +mixing syntaxes. +* <>. Returns top documents from a <>, +in the context of a retriever framework. +* <>. Combines and ranks multiple first-stage retrievers using +the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets +with different relevance indicators into a single result set. +An RRF retriever is a *compound retriever*, where its `filter` element is +propagated to its sub retrievers. ++ +Sub retrievers may not use elements that +are restricted by having a compound retriever as part of the retriever tree. +See the <> for detailed +examples and information on how to use the RRF retriever. + +[NOTE] +==== +Stay tuned for more retriever types in future releases! +==== + +[discrete] +=== What makes retrievers useful? + +Here's an overview of what makes retrievers useful and how they differ from +regular queries. + +. *Simplified user experience*. Retrievers simplify the user experience by +allowing entire retrieval pipelines to be configured in a single API call. 
This +maintains backward compatibility with traditional query elements by +automatically translating them to the appropriate retriever. +. *Structured retrieval*. Retrievers provide a more structured way to define search +operations. They allow searches to be described using a "retriever tree", a +hierarchical structure that clarifies the sequence and logic of operations, +making complex searches more understandable and manageable. +. *Composability and flexibility*. Retrievers enable flexible composability, +allowing you to build pipelines and seamlessly integrate different retrieval +strategies into these pipelines. Retrievers make it easy to test out different +retrieval strategy combinations. +. *Compound operations*. A retriever can have sub retrievers. This +allows complex nested searches where the results of one retriever feed into +another, supporting sophisticated querying strategies that might involve +multiple stages or criteria. +. *Retrieval as a first-class concept*. Unlike +traditional queries, where the query is a part of a larger search API call, +retrievers are designed as standalone entities that can be combined or used in +isolation. This enables a more modular and flexible approach to constructing +searches. +. *Enhanced control over document scoring and ranking*. Retrievers +allow for more explicit control over how documents are scored and filtered. For +instance, you can specify minimum score thresholds, apply complex filters +without affecting scoring, and use parameters like `terminate_after` for +performance optimizations. +. *Integration with existing {es} functionalities*. Even though +retrievers can be used instead of existing `_search` API syntax (like the +`query` and `knn`), they are designed to integrate seamlessly with things like +pagination (`search_after`) and sorting. They also maintain compatibility with +aggregation operations by treating the combination of all leaf retrievers as +`should` clauses in a boolean query. +. 
*Cleaner separation of concerns*. When using compound retrievers, only the +query element is allowed, which enforces a cleaner separation of concerns +and prevents the complexity that might arise from overly nested or +interdependent configurations. + +[discrete] +[[retrievers-overview-example]] +=== Example + +The following example demonstrates how using retrievers +simplify the composability of queries for RRF ranking. + +[source,js] +---- +GET example-index/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "text_expansion": { + "vector.tokens": { + "model_id": ".elser_model_2", + "model_text": "What blue shoes are on sale?" + } + } + } + } + }, + { + "standard": { + "query": { + "match": { + "text": "blue shoes sale" + } + } + } + } + ] + } + } +} +---- +//NOTCONSOLE + +This example demonstrates how you can combine different +retrieval strategies into a single `retriever` pipeline. + +Compare to `RRF` with `sub_searches` approach: + +.*Expand* for example +[%collapsible] +============== + +[source,js] +---- +GET example-index/_search +{ + "sub_searches":[ + { + "query":{ + "match":{ + "text":"blue shoes sale" + } + } + }, + { + "query":{ + "text_expansion":{ + "vector.tokens":{ + "model_id":".elser_model_2", + "model_text":"What blue shoes are on sale?" + } + } + } + } + ], + "rank":{ + "rrf":{ + "window_size":50, + "rank_constant":20 + } + } +} +---- +//NOTCONSOLE +============== + +[discrete] +[[retrievers-overview-glossary]] +=== Glossary + +Here are some important terms: + +* *Retrieval Pipeline*. Defines the entire retrieval and ranking logic to +produce top hits. +* *Retriever Tree*. A hierarchical structure that defines how retrievers interact. +* *First-stage Retriever*. Returns an initial set of candidate documents. +* *Compound Retriever*. Builds on one or more retrievers, +enhancing document retrieval and ranking logic. +* *Combiners*. Compound retrievers that merge top hits +from multiple sub-retrievers. 
+//* NOT YET *Rerankers*. Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers. + +[discrete] +[[retrievers-overview-play-in-search]] +=== Retrievers in action + +The Search Playground builds Elasticsearch queries using the retriever abstraction. +It automatically detects the fields and types in your index and builds a retriever tree based on your selections. + +You can use the Playground to experiment with different retriever configurations and see how they affect search results. + +Refer to the {kibana-ref}/playground.html[Playground documentation] for more information. +// Content coming in https://github.com/elastic/kibana/pull/182692 + + + diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index bed204985296c..e1c1618410f2f 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,11 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. 
include::search-api.asciidoc[] -include::search-application-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] +include::retrievers-overview.asciidoc[] include::learning-to-rank.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] +include::search-application-overview.asciidoc[] include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index a4d892c98645b..a1197e7bbbd3a 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -136,3 +136,4 @@ include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[ include::semantic-search-elser.asciidoc[] include::semantic-search-inference.asciidoc[] +include::cohere-es.asciidoc[] diff --git a/docs/reference/settings/inference-settings.asciidoc b/docs/reference/settings/inference-settings.asciidoc new file mode 100644 index 0000000000000..fa0905cf0ef73 --- /dev/null +++ b/docs/reference/settings/inference-settings.asciidoc @@ -0,0 +1,92 @@ + +[role="xpack"] +[[inference-settings]] +=== Inference API settings in {es} +++++ +Inference settings +++++ + +[[inference-settings-description]] +// tag::inference-settings-description-tag[] +You do not need to configure any settings to use the {infer} APIs. Each setting has a default. +// end::inference-settings-description-tag[] + +[discrete] +[[xpack-inference-logging]] +// tag::inference-logging[] +==== Inference API logging settings + +When certain failures occur, a log message is emitted. In the case of a +reoccurring failure the logging throttler restricts repeated messages from being logged. 
+
+`xpack.inference.logging.reset_interval`::
+(<<static-cluster-setting,Static>>) Specifies the interval for when a cleanup thread will clear an internal
+cache of the previously logged messages. Defaults to one day (`1d`).
+
+`xpack.inference.logging.wait_duration`::
+(<<static-cluster-setting,Static>>) Specifies the amount of time to wait after logging a message before that
+message can be logged again. Defaults to one hour (`1h`).
+// end::inference-logging[]
+
+[[xpack-inference-http-settings]]
+// tag::inference-http-settings[]
+==== {infer-cap} API HTTP settings
+
+`xpack.inference.http.max_response_size`::
+(<<static-cluster-setting,Static>>) Specifies the maximum size in bytes an HTTP response is allowed to have,
+defaults to `10mb`, the maximum configurable value is `50mb`.
+
+`xpack.inference.http.max_total_connections`::
+(<<static-cluster-setting,Static>>) Specifies the maximum number of connections the internal connection pool can
+lease. Defaults to `50`.
+
+`xpack.inference.http.max_route_connections`::
+(<<static-cluster-setting,Static>>) Specifies the maximum number of connections a single route can lease from
+the internal connection pool. If this setting is set to a value equal to or greater than
+`xpack.inference.http.max_total_connections`, then a single third party service could lease all available
+connections and other third party services would be unable to lease connections. Defaults to `20`.
+
+`xpack.inference.http.connection_eviction_interval`::
+(<<static-cluster-setting,Static>>) Specifies the interval that an eviction thread will run to remove expired and
+stale connections from the internal connection pool. Decreasing this time value can help improve throughput if
+multiple third party services are contending for the available connections in the pool. Defaults to one minute (`1m`).
+
+`xpack.inference.http.connection_eviction_max_idle_time`::
+(<<static-cluster-setting,Static>>) Specifies the maximum duration a connection can be unused before it is marked as
+idle and can be closed and removed from the shared connection pool. Defaults to one minute (`1m`).
+
+`xpack.inference.http.request_executor.queue_capacity`::
+(<<static-cluster-setting,Static>>) Specifies the size of the internal queue for requests waiting to be sent. If
+the queue is full and a request is sent to the {infer} API, it will be rejected. Defaults to `2000`.
+
+[[xpack-inference-http-retry-settings]]
+==== {infer-cap} API HTTP Retry settings
+
+When a third-party service returns a transient failure code (for example, 429), the request is retried by the {infer}
+API. These settings govern the retry behavior. When a request is retried, exponential backoff is used.
+
+`xpack.inference.http.retry.initial_delay`::
+(<<static-cluster-setting,Static>>) Specifies the initial delay before retrying a request. Defaults to one second
+(`1s`).
+
+`xpack.inference.http.retry.max_delay_bound`::
+(<<static-cluster-setting,Static>>) Specifies the maximum delay for a request. Defaults to five seconds (`5s`).
+
+`xpack.inference.http.retry.timeout`::
+(<<static-cluster-setting,Static>>) Specifies the maximum amount of time a request can be retried.
+Once the request exceeds this time, the request will no longer be retried and a failure will be returned.
+Defaults to 30 seconds (`30s`).
+// end::inference-http-settings[]
+
+[[xpack-inference-input-text]]
+// tag::inference-input-text[]
+==== {infer-cap} API Input text
+
+For certain third-party service integrations, when the service returns an error indicating that the request
+input was too large, the input will be truncated and the request is retried. These settings govern
+how the truncation is performed.
+
+`xpack.inference.truncator.reduction_percentage`::
+(<<static-cluster-setting,Static>>) Specifies the percentage to reduce the input text by if the 3rd party service
+responds with an error indicating it is too long. Defaults to 50 percent (`0.5`).
+// end::inference-input-text[]
diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc
index e007b67a943b0..64626aafb2441 100644
--- a/docs/reference/setup.asciidoc
+++ b/docs/reference/setup.asciidoc
@@ -29,8 +29,6 @@ resource-heavy {ls} deployment should be on its own host.
include::setup/install.asciidoc[] -include::setup/run-elasticsearch-locally.asciidoc[] - include::setup/configuration.asciidoc[] include::setup/important-settings.asciidoc[] @@ -70,6 +68,8 @@ include::setup/logging-config.asciidoc[] include::settings/ml-settings.asciidoc[] +include::settings/inference-settings.asciidoc[] + include::settings/monitoring-settings.asciidoc[] include::modules/node.asciidoc[] diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 49501c46b8ba9..89373d0ce8d44 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -20,7 +20,7 @@ If you want to install and manage {es} yourself, you can: * Run {es} in a <>. * Set up and manage {es}, {kib}, {agent}, and the rest of the Elastic Stack on Kubernetes with {eck-ref}[{eck}]. -TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. +TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. Please note that this setup is *not suitable for production use*. [discrete] [[elasticsearch-install-packages]] diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 0c518d520bdd5..370fc5c4ccf7e 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -8,6 +8,12 @@ https://github.com/elastic/elasticsearch/blob/{branch}/distribution/docker[GitHu include::license.asciidoc[] +[TIP] +==== +If you just want to test {es} in local development, refer to <>. +Please note that this setup is not suitable for production environments. 
+==== + [[docker-cli-run-dev-mode]] ==== Run {es} in Docker diff --git a/docs/reference/setup/run-elasticsearch-locally.asciidoc b/docs/reference/setup/run-elasticsearch-locally.asciidoc deleted file mode 100644 index a6e6d5c8963a2..0000000000000 --- a/docs/reference/setup/run-elasticsearch-locally.asciidoc +++ /dev/null @@ -1,183 +0,0 @@ -[[run-elasticsearch-locally]] -== Run Elasticsearch locally - -//// -IMPORTANT: This content is replicated in the Elasticsearch repo -README.ascidoc file. If you make changes, you must also update the -Elasticsearch README. -+ -GitHub renders the tagged region directives when you view the README, -so it's not possible to just include the content from the README. Darn. -+ -Also note that there are similar instructions in the Kibana guide: -https://www.elastic.co/guide/en/kibana/current/docker.html -//// - -To try out Elasticsearch on your own machine, we recommend using Docker -and running both Elasticsearch and Kibana. -Docker images are available from the https://www.docker.elastic.co[Elastic Docker registry]. - -NOTE: Starting in Elasticsearch 8.0, security is enabled by default. -The first time you start Elasticsearch, TLS encryption is configured automatically, -a password is generated for the `elastic` user, -and a Kibana enrollment token is created so you can connect Kibana to your secured cluster. - -For other installation options, see the -https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Elasticsearch installation documentation]. - -[discrete] -=== Start Elasticsearch - -. Install and start https://www.docker.com/products/docker-desktop[Docker -Desktop]. Go to **Preferences > Resources > Advanced** and set Memory to at least 4GB. - -. Start an Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {es} has not yet been released, so no -Docker image is currently available for this version. 
-endif::[] -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull docker.elastic.co/elasticsearch/elasticsearch:{version} -docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:{version} ----- -+ -When you start Elasticsearch for the first time, the generated `elastic` user password and -Kibana enrollment token are output to the terminal. -+ -NOTE: You might need to scroll back a bit in the terminal to view the password -and enrollment token. - -. Copy the generated password and enrollment token and save them in a secure -location. These values are shown only when you start Elasticsearch for the first time. -You'll use these to enroll Kibana with your Elasticsearch cluster and log in. - -[discrete] -=== Start Kibana - -Kibana enables you to easily send requests to Elasticsearch and analyze, visualize, and manage data interactively. - -. In a new terminal session, start Kibana and connect it to your Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {kib} has not yet been released, so no -Docker image is currently available for this version. -endif::[] -+ -[source,sh,subs="attributes"] ----- -docker pull docker.elastic.co/kibana/kibana:{version} -docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:{version} ----- -+ -When you start Kibana, a unique URL is output to your terminal. - -. To access Kibana, open the generated URL in your browser. - - .. Paste the enrollment token that you copied when starting - Elasticsearch and click the button to connect your Kibana instance with Elasticsearch. - - .. Log in to Kibana as the `elastic` user with the password that was generated - when you started Elasticsearch. - -[discrete] -=== Send requests to Elasticsearch - -You send data and other requests to Elasticsearch through REST APIs. 
-You can interact with Elasticsearch using any client that sends HTTP requests, -such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch -language clients] and https://curl.se[curl]. -Kibana's developer console provides an easy way to experiment and test requests. -To access the console, go to **Management > Dev Tools**. - -[discrete] -=== Add data - -You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs. -Whether you have structured or unstructured text, numerical data, or geospatial data, -Elasticsearch efficiently stores and indexes it in a way that supports fast searches. - -For timestamped data such as logs and metrics, you typically add documents to a -data stream made up of multiple auto-generated backing indices. - -To add a single document to an index, submit an HTTP post request that targets the index. - -[source,console] ----- -POST /customer/_doc/1 -{ - "firstname": "Jennifer", - "lastname": "Walters" -} ----- - -This request automatically creates the `customer` index if it doesn't exist, -adds a new document that has an ID of 1, and -stores and indexes the `firstname` and `lastname` fields. - -The new document is available immediately from any node in the cluster. -You can retrieve it with a GET request that specifies its document ID: - -[source,console] ----- -GET /customer/_doc/1 ----- -// TEST[continued] - -To add multiple documents in one request, use the `_bulk` API. -Bulk data must be newline-delimited JSON (NDJSON). -Each line must end in a newline character (`\n`), including the last line. 
- -[source,console] ----- -PUT customer/_bulk -{ "create": { } } -{ "firstname": "Monica","lastname":"Rambeau"} -{ "create": { } } -{ "firstname": "Carol","lastname":"Danvers"} -{ "create": { } } -{ "firstname": "Wanda","lastname":"Maximoff"} -{ "create": { } } -{ "firstname": "Jennifer","lastname":"Takeda"} ----- -// TEST[continued] - -[discrete] -=== Search - -Indexed documents are available for search in near real-time. -The following search matches all customers with a first name of _Jennifer_ -in the `customer` index. - -[source,console] ----- -GET customer/_search -{ - "query" : { - "match" : { "firstname": "Jennifer" } - } -} ----- -// TEST[continued] - -[discrete] -=== Explore - -You can use Discover in Kibana to interactively search and filter your data. -From there, you can start creating visualizations and building and sharing dashboards. - -To get started, create a _data view_ that connects to one or more Elasticsearch indices, -data streams, or index aliases. - -. Go to **Management > Stack Management > Kibana > Data Views**. -. Select **Create data view**. -. Enter a name for the data view and a pattern that matches one or more indices, -such as _customer_. -. Select **Save data view to Kibana**. - -To start exploring, go to **Analytics > Discover**. - - diff --git a/docs/reference/tab-widgets/api-call-widget.asciidoc b/docs/reference/tab-widgets/api-call-widget.asciidoc index adc2aa86f1c0e..4ad3c45366434 100644 --- a/docs/reference/tab-widgets/api-call-widget.asciidoc +++ b/docs/reference/tab-widgets/api-call-widget.asciidoc @@ -12,7 +12,7 @@ aria-controls="self-managed-tab-api-call" id="self-managed-api-call" tabindex="-1"> - Self-managed + Local Dev (Docker)

- Elasticsearch Service + Elastic Cloud
> for advanced Docker documentation. - -. Run the following Docker commands: -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull {docker-image} -docker run --name es01 --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t {docker-image} ----- - -. Copy the generated `elastic` password and enrollment token, which are output to your terminal. -You'll use these to enroll {kib} with your {es} cluster and log in. -These credentials are only shown when you start {es} for the first time. -+ -We recommend storing the `elastic` password as an environment variable in your shell. Example: -+ -[source,sh] ----- -export ELASTIC_PASSWORD="your_password" ----- -+ -. Copy the `http_ca.crt` SSL certificate from the container to your local machine. -+ -[source,sh] ----- -docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ----- -+ -. Make a REST API call to {es} to ensure the {es} container is running. -+ -[source,sh] ----- -curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ----- -// NOTCONSOLE - -*Run {kib}* - -{kib} is the user interface for Elastic. -It's great for getting started with {es} and exploring your data. -We'll be using the Dev Tools *Console* in {kib} to make REST API calls to {es}. - -In a new terminal session, start {kib} and connect it to your {es} container: - -[source,sh,subs="attributes"] ----- -docker pull {kib-docker-image} -docker run --name kibana --net elastic -p 5601:5601 {kib-docker-image} ----- - -When you start {kib}, a unique URL is output to your terminal. -To access {kib}: - -. Open the generated URL in your browser. -. Paste the enrollment token that you copied earlier, to connect your {kib} instance with {es}. -. Log in to {kib} as the `elastic` user with the password that was generated when you started {es}. +Refer to our <> to quickly spin up a local development environment in Docker. 
If you don't need {kib}, you'll only need one `docker run` command to start {es}. Please note that this setup is *not suitable for production use*. // end::self-managed[] \ No newline at end of file diff --git a/docs/reference/transform/images/transform-rule.png b/docs/reference/transform/images/transform-rule.png deleted file mode 100644 index c43dd6c1be929..0000000000000 Binary files a/docs/reference/transform/images/transform-rule.png and /dev/null differ diff --git a/docs/reference/transform/transform-alerts.asciidoc b/docs/reference/transform/transform-alerts.asciidoc index e3ea82d34ec2e..988dc5effe956 100644 --- a/docs/reference/transform/transform-alerts.asciidoc +++ b/docs/reference/transform/transform-alerts.asciidoc @@ -18,19 +18,20 @@ refer to You can create {transform} rules under **{stack-manage-app} > {rules-ui}**. -. On the *Create rule* window, give a name to the rule and optionally provide -tags. Select the {transform} health rule type: +. Click *Create rule* and select the {transform} health rule type. + +. Give a name to the rule and optionally provide tags. + +. Select the {transform} or {transforms} to include. You can also use a special +character (`*`) to apply the rule to all your {transforms}. {transforms-cap} +created after the rule are automatically included. + -- [role="screenshot"] -image::images/transform-rule.png["Creating a transform health rule",500] +image::images/transform-check-config.png["Selecting health check",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. -- -. Select the {transform} or {transforms} to include. You can also use a special -character (`*`) to apply the rule to all your {transforms}. {transforms-cap} -created after the rule are automatically included. - . The following health checks are available and enabled by default: + -- @@ -41,10 +42,6 @@ _{transform-cap} is not started_:: _Unhealthy {transform}_:: Get alerts when a {transform} has an unhealthy status. 
The notification message contains status details and related issues. - -[role="screenshot"] -image::images/transform-check-config.png["Selecting health check",500] -// NOTE: This is screenshot is automatically generated. Do not edit it directly. -- . Set the check interval, which defines how often to evaluate the rule conditions. diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java index 201f0810f4d9b..32c4446e71dd2 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java @@ -17,6 +17,7 @@ import java.io.Closeable; import java.io.IOException; +import java.io.StringWriter; import java.util.Arrays; /** @@ -45,7 +46,7 @@ public Command(final String description) { } /** Parses options for this command from args and executes it. */ - public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws Exception { + public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws IOException { try { mainWithoutErrorHandling(args, terminal, processInfo); } catch (OptionException e) { @@ -59,6 +60,14 @@ public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) } printUserException(terminal, e); return e.exitCode; + } catch (IOException ioe) { + terminal.errorPrintln(ioe); + return ExitCodes.IO_ERROR; + } catch (Throwable t) { + // It's acceptable to catch Throwable at this point: + // We're about to exit and only want to print the stacktrace with appropriate formatting (e.g. JSON). + terminal.errorPrintln(t); + return ExitCodes.CODE_ERROR; } return ExitCodes.OK; } @@ -96,15 +105,17 @@ public OptionSet parseOptions(String[] args) { /** Prints a help message for the command to the terminal. 
*/ private void printHelp(Terminal terminal, boolean toStdError) throws IOException { + StringWriter writer = new StringWriter(); + parser.printHelpOn(writer); if (toStdError) { terminal.errorPrintln(description); terminal.errorPrintln(""); - parser.printHelpOn(terminal.getErrorWriter()); + terminal.errorPrintln(writer.toString()); } else { terminal.println(description); terminal.println(""); printAdditionalHelp(terminal); - parser.printHelpOn(terminal.getWriter()); + terminal.println(writer.toString()); } } diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java index 69cb76636a996..aaf233438f263 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java @@ -72,6 +72,13 @@ protected Terminal(Reader reader, PrintWriter outWriter, PrintWriter errWriter) this.errWriter = errWriter; } + /** + * Constructs a terminal instance from a delegate instance. + */ + protected Terminal(Terminal delegate) { + this(delegate.reader, delegate.outWriter, delegate.errWriter); + } + /** * Sets the verbosity of the terminal. * @@ -113,14 +120,12 @@ public final Reader getReader() { return reader; } - /** Returns a Writer which can be used to write to the terminal directly using standard output. */ - public final PrintWriter getWriter() { - return outWriter; - } - - /** Returns a Writer which can be used to write to the terminal directly using standard error. */ - public final PrintWriter getErrorWriter() { - return errWriter; + /** + * Returns a line based OutputStream wrapping this Terminal's println. + * Note, this OutputStream is not thread-safe! + */ + public final OutputStream asLineOutputStream(Charset charset) { + return new LineOutputStream(charset); } /** @@ -138,7 +143,7 @@ public InputStream getInputStream() { * Returns an OutputStream which can be used to write to the terminal directly using standard output. * *

May return {@code null} if this Terminal is not capable of binary output. - * This corresponds with the underlying stream of bytes written to by {@link #getWriter()}. + * This corresponds with the underlying stream of bytes written to by {@link #println(CharSequence)}. */ @Nullable public OutputStream getOutputStream() { @@ -152,12 +157,12 @@ public final void println(CharSequence msg) { /** Prints a line to the terminal at {@code verbosity} level. */ public final void println(Verbosity verbosity, CharSequence msg) { - print(verbosity, outWriter, msg, true); + print(verbosity, outWriter, msg, true, true); } /** Prints message to the terminal's standard output at {@code verbosity} level, without a newline. */ public final void print(Verbosity verbosity, String msg) { - print(verbosity, outWriter, msg, false); + print(verbosity, outWriter, msg, false, true); } /** @@ -165,30 +170,49 @@ public final void print(Verbosity verbosity, String msg) { * * Subclasses may override if the writers are not implemented. */ - protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline) { + protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline, boolean flush) { if (isPrintable(verbosity)) { if (newline) { writer.println(msg); } else { writer.print(msg); } - writer.flush(); + if (flush) { + writer.flush(); + } } } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level, without a newline. */ public final void errorPrint(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, false); + print(verbosity, errWriter, msg, false, true); } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level. */ public final void errorPrintln(String msg) { - errorPrintln(Verbosity.NORMAL, msg); + print(Verbosity.NORMAL, errWriter, msg, true, true); } /** Prints a line to the terminal's standard error at {@code verbosity} level. 
*/ public final void errorPrintln(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, true); + print(verbosity, errWriter, msg, true, true); + } + + /** Prints a line to the terminal's standard error at {@code verbosity} level, with an optional flush */ + public final void errorPrintln(Verbosity verbosity, String msg, boolean flush) { + print(verbosity, errWriter, msg, true, flush); + } + + /** Prints a stacktrace to the terminal's standard error at {@code verbosity} level. */ + public void errorPrintln(Verbosity verbosity, Throwable throwable) { + if (isPrintable(verbosity)) { + throwable.printStackTrace(errWriter); + } + } + + /** Prints a stacktrace to the terminal's standard error at {@link Verbosity#SILENT} verbosity level. */ + public void errorPrintln(Throwable throwable) { + errorPrintln(Verbosity.SILENT, throwable); } /** Checks if is enough {@code verbosity} level to be printed */ @@ -339,4 +363,54 @@ public OutputStream getOutputStream() { return System.out; } } + + /** A line based OutputStream wrapping this Terminal's println, not thread-safe! */ + private class LineOutputStream extends OutputStream { + static final int DEFAULT_BUFFER_LENGTH = 1024; + static final int MAX_BUFFER_LENGTH = DEFAULT_BUFFER_LENGTH * 8; + + private final Charset charset; + private byte[] bytes = new byte[DEFAULT_BUFFER_LENGTH]; + private int count = 0; + + LineOutputStream(Charset charset) { + this.charset = charset; + } + + @Override + public void write(int b) { + if (b == 0) return; + if (b == '\n') { + flush(true); + return; + } + if (count == bytes.length) { + if (count >= MAX_BUFFER_LENGTH) { + flush(false); + } else { + bytes = Arrays.copyOf(bytes, 2 * bytes.length); + } + } + bytes[count++] = (byte) b; + } + + private void flush(boolean newline) { + if (newline && count > 0 && bytes[count - 1] == '\r') { + --count; // drop CR on windows as well + } + String msg = count > 0 ? 
new String(bytes, 0, count, charset) : ""; + print(Verbosity.NORMAL, outWriter, msg, newline, true); + count = 0; + if (bytes.length > DEFAULT_BUFFER_LENGTH) { + bytes = new byte[DEFAULT_BUFFER_LENGTH]; + } + } + + @Override + public void flush() { + if (count > 0) { + flush(false); + } + } + } } diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java index 9c1faf911a829..dffb93ebbf230 100644 --- a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -11,6 +11,17 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + @WithoutSecurityManager public class TerminalTests extends ESTestCase { @@ -20,4 +31,33 @@ public void testSystemTerminalIfRedirected() { // Otherwise, JDK 22 doesn't provide a console if redirected. 
assertEquals(Terminal.SystemTerminal.class, Terminal.DEFAULT.getClass()); } + + public void testTerminalAsLineOutputStream() throws IOException { + PrintWriter stdOut = mock("stdOut"); + PrintWriter stdErr = mock("stdErr"); + + OutputStream out = new Terminal(mock("reader"), stdOut, stdErr) { + }.asLineOutputStream(StandardCharsets.UTF_8); + + out.write("123".getBytes(StandardCharsets.UTF_8)); + out.write("456".getBytes(StandardCharsets.UTF_8)); + out.write("789\r\n".getBytes(StandardCharsets.UTF_8)); // CR is removed as well + + verify(stdOut).println(eq((CharSequence) "123456789")); + verify(stdOut).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("\n".getBytes(StandardCharsets.UTF_8)); + verify(stdOut).println(eq((CharSequence) "")); + verify(stdOut, times(2)).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("a".getBytes(StandardCharsets.UTF_8)); + out.flush(); + verify(stdOut).print(eq((CharSequence) "a")); + verify(stdOut, times(3)).flush(); + + out.flush(); + verifyNoMoreInteractions(stdOut, stdErr); + } } diff --git a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java index 5153ba688d6a9..74acb00925e5a 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java +++ b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java @@ -61,4 +61,15 @@ public static Predicate onOrAfter(RestApiVersion restApiVersion) }; } + public static RestApiVersion forMajor(int major) { + switch (major) { + case 7 -> { + return V_7; + } + case 8 -> { + return V_8; + } + default -> throw new IllegalArgumentException("Unknown REST API version " + major); + } + } } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 5313984ac6d61..0af87154960ad 100644 --- 
a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -17,7 +17,10 @@ import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_INT; @@ -26,31 +29,49 @@ class JdkSystemdLibrary implements SystemdLibrary { static { - System.load(findLibSystemd()); - } - - // On some systems libsystemd does not have a non-versioned symlink. System.loadLibrary only knows how to find - // non-versioned library files. So we must manually check the library path to find what we need. - static String findLibSystemd() { - final String libsystemd = "libsystemd.so.0"; - String libpath = System.getProperty("java.library.path"); - for (String basepathStr : libpath.split(":")) { - var basepath = Paths.get(basepathStr); - if (Files.exists(basepath) == false) { - continue; + // Find and load libsystemd. We attempt all instances of + // libsystemd in case of multiarch systems, and stop when + // one is successfully loaded. If none can be loaded, + // UnsatisfiedLinkError will be thrown. 
+ List paths = findLibSystemd(); + if (paths.isEmpty()) { + String libpath = System.getProperty("java.library.path"); + throw new UnsatisfiedLinkError("Could not find libsystemd in java.library.path: " + libpath); + } + UnsatisfiedLinkError last = null; + for (String path : paths) { + try { + System.load(path); + last = null; + break; + } catch (UnsatisfiedLinkError e) { + last = e; } - try (var stream = Files.walk(basepath)) { + } + if (last != null) { + throw last; + } + } - var foundpath = stream.filter(Files::isDirectory).map(p -> p.resolve(libsystemd)).filter(Files::exists).findAny(); - if (foundpath.isPresent()) { - return foundpath.get().toAbsolutePath().toString(); - } + // findLibSystemd returns a list of paths to instances of libsystemd + // found within java.library.path. + static List findLibSystemd() { + // Note: on some systems libsystemd does not have a non-versioned symlink. + // System.loadLibrary only knows how to find non-versioned library files, + // so we must manually check the library path to find what we need. 
+ final Path libsystemd = Paths.get("libsystemd.so.0"); + final String libpath = System.getProperty("java.library.path"); + return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + try { + return Files.find( + p, + Integer.MAX_VALUE, + (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) + ); } catch (IOException e) { throw new UncheckedIOException(e); } - - } - throw new UnsatisfiedLinkError("Could not find " + libsystemd + " in java.library.path: " + libpath); + }).map(p -> p.toAbsolutePath().toString()).toList(); } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml index b1b9623c8769c..9f30deebc9fbd 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml @@ -382,9 +382,9 @@ setup: --- "Daylight with offset date_histogram test": - - skip: - version: "- 7.16.99" - reason: Bug fixed before 7.16.99 + - requires: + cluster_features: "gte_v7.17.0" + reason: Bug fixed with 7.17 - do: search: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml index f1ec41bdfe622..864b122e72020 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml @@ -1,6 +1,6 @@ "global agg with a terms filtered alias": - - skip: - version: "- 8.9.99" 
+ - requires: + cluster_features: "gte_v8.10.0" reason: Fixed in 8.10 - do: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml index fd15d24a5f3ca..34ae07c35bb2a 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml @@ -65,8 +65,8 @@ setup: --- "terms aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -100,8 +100,8 @@ setup: --- "terms aggregation on _ignored metadata field with top hits": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -136,8 +136,8 @@ setup: --- "date histogram aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -172,8 +172,8 @@ setup: --- "cardinality aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -189,8 +189,8 @@ setup: --- "value count aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -206,8 +206,8 @@ setup: --- "date range aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: 
"gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -249,8 +249,8 @@ setup: --- "random sampler aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -280,10 +280,10 @@ setup: --- "filter aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - features: close_to + test_runner_features: close_to - do: search: body: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml index 650c8447c5b10..d9298a832e650 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml @@ -26,8 +26,8 @@ setup: --- "fail formatting": - - skip: - version: "- 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: fixed in 8.15.0 - do: catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[\], Z, MILLISECONDS\)\]/ diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java index 3802d572e04dd..5fe72c38078ee 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java @@ -201,12 +201,12 @@ public void 
testGeneralMockupEcsMappings() throws Exception { "host": { "cpu": { "usage": 0.68 - } - }, - "geo": { - "location": { - "lon": -73.614830, - "lat": 45.505918 + }, + "geo": { + "location": { + "lon": -73.614830, + "lat": 45.505918 + } } }, "data_stream": { @@ -414,7 +414,10 @@ public void testGeneralMockupEcsMappings() throws Exception { getValueFromPath(properties, List.of("host", "properties", "cpu", "properties", "usage", "scaling_factor")), is(1000.0) ); - assertThat(getValueFromPath(properties, List.of("geo", "properties", "location", "type")), is("geo_point")); + assertThat( + getValueFromPath(properties, List.of("host", "properties", "geo", "properties", "location", "type")), + is("geo_point") + ); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "dataset", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "type", "type")), is("constant_keyword")); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index 2370cca08b23e..79d33a95c4709 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -463,7 +463,6 @@ public void testNoSubobjects() throws Exception { { "@timestamp": "2023-06-12", "start_timestamp": "2023-06-08", - "location" : "POINT (-71.34 41.12)", "test": "flattened", "test.start_timestamp": "not a date", "test.start-timestamp": "not a date", @@ -497,7 +496,7 @@ public void testNoSubobjects() throws Exception { "vulnerability.score.version": "2.0", "vulnerability.textual_score": "bad", "host.cpu.usage": 0.68, - "geo.location": 
[-73.614830, 45.505918], + "host.geo.location": [-73.614830, 45.505918], "data_stream.dataset": "nginx.access", "data_stream.namespace": "production", "data_stream.custom": "whatever", @@ -521,8 +520,7 @@ public void testNoSubobjects() throws Exception { }, "fields": [ "data_stream.type", - "location", - "geo.location", + "host.geo.location", "test.start-timestamp", "test.start_timestamp", "vulnerability.textual_score" @@ -537,14 +535,9 @@ public void testNoSubobjects() throws Exception { // verify that data_stream.type has the correct constant_keyword value assertThat(fields.get("data_stream.type"), is(List.of("logs"))); // verify geo_point subfields evaluation - assertThat(((List>) fields.get("location")).get(0).get("type"), is("Point")); - List coordinates = ((List>>) fields.get("location")).get(0).get("coordinates"); - assertThat(coordinates.size(), is(2)); - assertThat(coordinates.get(0), equalTo(-71.34)); - assertThat(coordinates.get(1), equalTo(41.12)); - List geoLocation = (List) fields.get("geo.location"); + List geoLocation = (List) fields.get("host.geo.location"); assertThat(((Map) geoLocation.get(0)).get("type"), is("Point")); - coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); + List coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); assertThat(coordinates.size(), is(2)); assertThat(coordinates.get(0), equalTo(-73.614830)); assertThat(coordinates.get(1), equalTo(45.505918)); @@ -612,8 +605,7 @@ public void testNoSubobjects() throws Exception { assertThat(getValueFromPath(properties, List.of("vulnerability.textual_score", "type")), is("float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "type")), is("scaled_float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "scaling_factor")), is(1000.0)); - assertThat(getValueFromPath(properties, List.of("location", "type")), is("geo_point")); - assertThat(getValueFromPath(properties, List.of("geo.location", "type")), is("geo_point")); + 
assertThat(getValueFromPath(properties, List.of("host.geo.location", "type")), is("geo_point")); assertThat(getValueFromPath(properties, List.of("data_stream.dataset", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream.namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream.type", "type")), is("constant_keyword")); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml index d74bd2e598a86..5b6ece610af32 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml @@ -1,8 +1,8 @@ bad setting fails: - skip: - version: all - reason: https://github.com/elastic/elasticsearch/issues/78677 - features: allowed_warnings + awaits_fix: https://github.com/elastic/elasticsearch/issues/78677 + - requires: + test_runner_features: allowed_warnings - do: allowed_warnings: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml index 18aee1bf77232..1cf44312ae7d5 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml @@ -52,8 +52,7 @@ setup: --- "Get data stream with default lifecycle": - skip: - version: all - reason: https://github.com/elastic/elasticsearch/pull/100187 + awaits_fix: https://github.com/elastic/elasticsearch/pull/100187 - do: indices.get_data_lifecycle: diff --git 
a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index c47dacacde3d8..68d1fa3da0dfc 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -98,8 +98,8 @@ teardown: --- "Test Drop Processor with Upsert (_bulk)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: @@ -139,8 +139,8 @@ teardown: --- "Test Drop Processor with Upsert (_update)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml index e2f4e32777a1f..b2cbb352448ab 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml @@ -24,8 +24,7 @@ teardown: --- "Test first matching router terminates pipeline": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102144" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/102144" - do: ingest.put_pipeline: id: "pipeline-with-two-data-stream-processors" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml index 
0bf623e8ff263..58c59e6852306 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml @@ -76,8 +76,8 @@ teardown: --- "Test Fail Processor with Upsert (bulk)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index 40d646cc645f5..35b509eec9b45 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -119,9 +119,9 @@ teardown: --- profile fetch: - - skip: - version: ' - 8.14.99' - reason: fetch fields and stored_fields using ValueFetcher + - requires: + cluster_features: "gte_v8.15.0" + reason: "fetch fields and stored_fields using ValueFetcher" - do: search: diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 9ee714c3c4d59..d9ab689c05a5c 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -50,31 +50,26 @@ public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyReposi System.getProperty("test.azure.container") ); - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void 
testCreateSnapshot() { super.testCreateSnapshot(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testIndexLatest() throws Exception { super.testIndexLatest(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testListChildren() { super.testListChildren(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testCleanup() throws Exception { super.testCleanup(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testReadFromPositionWithLength() { super.testReadFromPositionWithLength(); @@ -162,7 +157,6 @@ private void ensureSasTokenPermissions() { future.actionGet(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") public void testMultiBlockUpload() throws Exception { final BlobStoreRepository repo = getRepository(); // The configured threshold for this test suite is 1mb diff --git a/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml b/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml index 91ae0a7160698..556f53357135f 100644 --- a/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml +++ b/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml @@ -11,7 +11,9 @@ --- "Info build flavor": - skip: - version: "8.3.0 - 8.3.2" + known_issues: + - cluster_feature: "gte_v8.3.0" + fixed_by: "gte_v8.3.3" reason: "build flavor in info was missing in 8.3.0 to 8.3.2" - do: {info: {}} - match: { version.build_flavor: default } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index e202d99218144..d2be4212cf41e 100644 --- 
a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -568,11 +568,9 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } }; // there's only one netty worker thread that's reused across client requests - Settings settings = Settings.builder() - .put(Netty4Plugin.WORKER_COUNT.getKey(), 1) + Settings settings = createBuilderWithPort().put(Netty4Plugin.WORKER_COUNT.getKey(), 1) .put(Netty4Plugin.SETTING_HTTP_WORKER_COUNT.getKey(), 0) .build(); - NioEventLoopGroup group = new NioEventLoopGroup(); AtomicBoolean acceptChannel = new AtomicBoolean(); try ( Netty4HttpServerTransport transport = new Netty4HttpServerTransport( @@ -601,9 +599,9 @@ public boolean test(String profile, InetSocketAddress peerAddress) { ) { transport.start(); int nRetries = randomIntBetween(7, 9); - for (int i = 0; i < nRetries; i++) { - acceptChannel.set(randomBoolean()); - try (Netty4HttpClient client = new Netty4HttpClient()) { + try (Netty4HttpClient client = new Netty4HttpClient()) { + for (int i = 0; i < nRetries; i++) { + acceptChannel.set(randomBoolean()); var responses = client.get(randomFrom(transport.boundAddress().boundAddresses()).address(), "/test/url"); try { if (acceptChannel.get()) { @@ -619,8 +617,6 @@ public boolean test(String profile, InetSocketAddress peerAddress) { } } } - } finally { - group.shutdownGracefully().await(); } } diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 81b3a086e9aca..08e3ac2cbce8c 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -39,8 +39,10 @@ 
import java.util.concurrent.TimeUnit; import java.util.function.Function; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * Create a simple "daemon controller", put it in the right place and check that it runs. @@ -64,18 +66,19 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static { // normally done by ESTestCase, but need here because spawner depends on logging LogConfigurator.loadLog4jPlugins(); + MockLogAppender.init(); } static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { final String expectedLogger; final String expectedMessage; - final CountDownLatch matchCalledLatch; - boolean saw; + final CountDownLatch matched; + volatile boolean saw; - ExpectedStreamMessage(String logger, String message, CountDownLatch matchCalledLatch) { + ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { this.expectedLogger = logger; this.expectedMessage = message; - this.matchCalledLatch = matchCalledLatch; + this.matched = matched; } @Override @@ -84,8 +87,8 @@ public void match(LogEvent event) { && event.getLevel().equals(Level.WARN) && event.getMessage().getFormattedMessage().equals(expectedMessage)) { saw = true; + matched.countDown(); } - matchCalledLatch.countDown(); } @Override @@ -129,7 +132,7 @@ public void testNoControllerSpawn() throws IOException { try (Spawner spawner = new Spawner()) { spawner.spawnNativeControllers(environment); - assertThat(spawner.getProcesses(), hasSize(0)); + assertThat(spawner.getProcesses(), is(empty())); } } @@ -228,7 +231,7 @@ private void assertControllerSpawns(final Function pluginsDir // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } else { - assertThat(processes, hasSize(0)); + assertThat(processes, is(empty())); } 
appender.assertAllExpectationsMatched(); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java new file mode 100644 index 0000000000000..4837afbf6ccd2 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.util.function.Supplier; + +public abstract class AbstractRollingUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { + + private static final TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("path.repo", new Supplier<>() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "false") + 
.feature(FeatureFlag.TIME_SERIES_MODE) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + protected AbstractRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 0487b282179a9..73abb634dfd76 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { +public class ClusterFeatureMigrationIT extends AbstractRollingUpgradeTestCase { @Before public void checkMigrationVersion() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 73d91ac41fcb7..c7f99b3525f74 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -public class DesiredNodesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { private final int desiredNodesVersion; diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 757f793ac4c46..488cd966ed65e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { +public class DownsampleIT extends AbstractRollingUpgradeTestCase { private static final String FIXED_INTERVAL = "1h"; private String index; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 4fe45c05b157b..fc77eef0ae8bb 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FeatureUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { public FeatureUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index 860cd2c0e8617..306447d8cc2cd 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -40,7 +40,7 @@ * the co-ordinating node if older nodes were included in the system */ @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/103473") -public class FieldCapsIT extends ParameterizedRollingUpgradeTestCase { +public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 0f210ee4b2450..6647cb413c9f5 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -20,7 +20,7 @@ import static org.hamcrest.CoreMatchers.equalTo; -public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java index 874fac615b9b1..1477e2b63cf03 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java @@ -26,7 +26,7 @@ import java.util.Locale; import java.util.Map; -public class IgnoredMetaFieldRollingUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class IgnoredMetaFieldRollingUpgradeIT extends AbstractRollingUpgradeTestCase { private static final String TERMS_AGG_QUERY = Strings.format(""" { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 82485130f05ce..157e2293b69ae 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -51,7 +51,7 @@ * xpack rolling restart tests. We should work on a way to remove this * duplication but for now we have no real way to share code. */ -public class IndexingIT extends ParameterizedRollingUpgradeTestCase { +public class IndexingIT extends AbstractRollingUpgradeTestCase { public IndexingIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 63ed54d05adf2..d5f645c387d61 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -14,74 +14,45 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; -import 
org.junit.ClassRule; -import org.junit.rules.RuleChain; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestRule; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { + protected static final int NODE_NUM = 3; private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); - - private static final TemporaryFolder repoDirectory = new TemporaryFolder(); - - private static final int NODE_NUM = 3; - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) - .nodes(NODE_NUM) - .setting("path.repo", new Supplier<>() { - @Override - @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") - public String get() { - return repoDirectory.getRoot().getPath(); - } - }) - .setting("xpack.security.enabled", "false") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); - - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); - - @ParametersFactory(shuffle = false) - public static Iterable parameters() { - return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); - } - private static final Set upgradedNodes = new HashSet<>(); private static TestFeatureService oldClusterTestFeatureService = null; private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; - private final int requestedUpgradedNodes; protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { this.requestedUpgradedNodes = upgradedNodes; } + @ParametersFactory(shuffle = false) + public static Iterable parameters() { + return 
IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); + } + + protected abstract ElasticsearchCluster getUpgradeCluster(); + @Before public void extractOldClusterFeatures() { if (isOldCluster() && oldClusterTestFeatureService == null) { @@ -135,7 +106,7 @@ public void upgradeNode() throws Exception { if (upgradedNodes.add(n)) { try { logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - cluster.upgradeNodeToVersion(n, Version.CURRENT); + getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); } catch (Exception e) { upgradeFailed = true; throw e; @@ -199,7 +170,7 @@ protected static boolean isUpgradedCluster() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + return getUpgradeCluster().getHttpAddresses(); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index ef80643c82c0d..593630546845d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; -public class SnapshotBasedRecoveryIT extends ParameterizedRollingUpgradeTestCase { +public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index fbd6ee8aa3759..a2e3b03c9036f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java 
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SystemIndicesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class SystemIndicesUpgradeIT extends AbstractRollingUpgradeTestCase { public SystemIndicesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 3ce0fc79087c2..2889885f83984 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TsdbIT extends ParameterizedRollingUpgradeTestCase { +public class TsdbIT extends AbstractRollingUpgradeTestCase { public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 3af344051030b..8dc3b43abf3e1 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -24,7 +24,7 @@ import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.is; -public class UpgradeWithOldIndexSettingsIT extends ParameterizedRollingUpgradeTestCase { +public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { public 
UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index e78e0978b1d80..21dbad9487d4e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; -public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { +public class VectorSearchIT extends AbstractRollingUpgradeTestCase { public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java index dade5b53addae..6379a8875dfb4 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java @@ -22,7 +22,7 @@ * Basic tests for simple xpack functionality that are only run if the * cluster is the on the default distribution. 
*/ -public class XPackIT extends ParameterizedRollingUpgradeTestCase { +public class XPackIT extends AbstractRollingUpgradeTestCase { public XPackIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 6d6ee1f6bed41..a42b987a9bddd 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ingest simulate added in 8.12' --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc index 0fcedece97f01..baec8169b4f76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc @@ -143,7 +143,7 @@ The `cluster_features` field can either be a string or an array of strings. [[synthetic_cluster_features]] Note: In order to smoothen the transition from version checks to cluster feature checks, a REST-test specific -synthetic cluster feature named `gte_v{VERSION}` is available for all release versions until including 8.14.0. +synthetic cluster feature named `gte_v{VERSION}` is available for all release versions up to 8.15.0. For instance, `gte_v8.12.2` would be available for all release versions greater than or equal to 8.12.2. 
[[skip_known_issues]] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml index 25f1230fb521e..646530214bf09 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml @@ -172,9 +172,14 @@ --- tsdb: + - requires: + cluster_features: "gte_v8.5.0" + reason: "Serialization for segment stats fixed in 8.5.0" - skip: - version: " - 8.4.99, 8.7.00 - 8.9.99" - reason: Serialization for segment stats fixed in 8.5.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml index 2d43d22da4ccf..4af42f3e2dfbb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.1.0" + reason: "Introduced in 8.1.0" - skip: - version: " - 8.0.99, 8.7.00 - 8.9.99" - reason: introduced in 8.1.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml index 667caf1ba92a7..e50ab9c65e0f7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: "- 8.1.99" + - requires: + cluster_features: "gte_v8.2.0" reason: Field type filters were added in 8.2 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index a000a9eac16ad..4e97b9fd44109 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -1,7 +1,7 @@ --- "cluster health basic test": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "health was added in 8.2.0, master_is_stable in 8.4.0, and REST API updated in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml index 449954220a1ea..335d02421b0a1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml @@ -1,7 +1,7 @@ --- "cluster health test drilling down into a feature": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the API path changed in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml index 76b81354b7413..0d9ac3017420c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml @@ -1,7 +1,7 @@ --- "Diagnosis": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the API path changed in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml index d765decda68a8..4f26a69712e83 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml @@ -333,9 +333,9 @@ setup: --- "Deprecated local parameter": - - skip: - version: "- 8.11.99" - features: ["warnings"] + - requires: + cluster_features: "gte_v8.12.0" + test_runner_features: ["warnings"] reason: verifying deprecation warnings from 8.12.0 onwards - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 280a645318dd9..45bcf64f98945 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml @@ -352,10 +352,10 @@ --- "Composable index templates that include subobjects: false at root": - - skip: - version: ' - 8.10.99' - reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.11.0" + reason: 
"https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: @@ -399,10 +399,10 @@ --- "Composable index templates that include subobjects: false on arbitrary field": - - skip: - version: ' - 8.10.99' - reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.11.0" + reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: @@ -494,10 +494,10 @@ - match: { test-generic.mappings.properties.field.ignore_above: 1024 } --- "Using deprecated component templates and pipelines in index template": - - skip: - version: ' - 8.11.99' - reason: 'The deprecated flags have been introduced in 8.12.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.12.0" + reason: "The deprecated flags have been introduced in 8.12.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml index 07c0e8b7a8b2a..c75b437110413 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ability to update non-dynamic settings added in 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index e0b8f56282c05..ae3eadded108b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -81,8 +81,7 @@ setup: --- "put settings in list of indices": - skip: - version: "all" - reason: list of indices not implemented yet + awaits_fix: list of indices not implemented yet - do: indices.put_settings: index: test_index1, test_index2 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml index 236653b7ca9ad..73ab9c18a8ec3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml @@ -1,9 +1,9 @@ --- "Simulate template without a template in the body": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["default_shards"] + test_runner_features: ["default_shards"] - do: indices.put_index_template: @@ -30,10 +30,10 @@ --- "Simulate index template specifying a new template": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["default_shards"] + test_runner_features: ["default_shards"] - do: indices.put_index_template: @@ -84,10 +84,10 @@ --- "Simulate template matches overlapping legacy and composable templates": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["allowed_warnings", "default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: 
indices.put_template: @@ -147,10 +147,10 @@ --- "Simulate replacing a template with a newer version": - - skip: - version: " - 7.99.99" + - requires: + cluster_features: "gte_v8.0.0" reason: "not yet backported" - features: ["allowed_warnings", "default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: indices.put_index_template: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml index 2221d08c0b7e2..673d3877d356b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml @@ -12,8 +12,8 @@ setup: --- "Validate query api": - - skip: - version: ' - 7.6.99' + - requires: + cluster_features: "gte_v7.7.0" reason: message changed in 7.7.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml index 9b12a2713e19c..50c96dcee0621 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml @@ -43,9 +43,9 @@ --- "Master timing stats": - - skip: - features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "master timing stats added in 7.16.0" - do: @@ -139,9 +139,9 @@ --- "Master cluster applier stats": - - skip: - features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "Cluster state applier stats available since 7.16.0" - do: @@ -161,9 +161,9 @@ --- "Master serialization stats": - - skip: - 
features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "master serialization stats added in 7.16.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml index 3551d022c2f4a..cdd1223d67f11 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: synthetic source support added in 8.15 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index a26bc22df8936..3ae8f8b09aa4a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -95,8 +95,8 @@ teardown: - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} --- "Test hybrid search with knn where automatically disables weighted mode": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN was not correctly skipped until 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml index cd3751dbb9653..f66b6216e2426 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml @@ -1,6 
+1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'check of preTags and postTags params for empty values was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml index 17f328046833e..1043d2881d2c3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml @@ -86,8 +86,8 @@ setup: --- "Unified highlighter with stored fields and disabled source": - - skip: - version: "- 7.10.1" + - requires: + cluster_features: "gte_v7.10.2" reason: "bug fix introduced in 7.10.2" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml index 23682a19ea6f7..fcd5b49c984c9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'standard retriever added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml index 68755f80c428d..d08a8e2a6d39c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml 
@@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'kNN retriever added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index 32558dbe5a8c0..72c6abab22600 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'nested kNN search added in 8.11' - do: indices.create: @@ -143,8 +143,8 @@ setup: - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} --- "nested kNN search inner_hits size > 1": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - do: @@ -265,10 +265,10 @@ setup: - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "1" } --- "nested kNN search inner_hits & boosting": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - features: close_to + test_runner_features: close_to - do: search: @@ -309,8 +309,8 @@ setup: - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00002, error: 0.00001} } --- "nested kNN search inner_hits & profiling": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'bugfix for inner_hits and profiling in 8.13' - do: search: @@ -329,8 +329,8 @@ setup: - is_true : profile --- "nested kNN search with filter that might match nested docs": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: 
"gte_v8.14.0" reason: 'bugfix for matching non-nested docs in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml index 849df86a30568..618951711cffd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml @@ -1,7 +1,7 @@ # test how knn query interacts with filters setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml index eb70e5b7bcf64..c6f3e187f7953 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -1,9 +1,9 @@ # test how knn query interacts with filters setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - features: close_to + test_runner_features: "close_to" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml index 53cc7eb064270..79ff3f61742f8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: indices.create: @@ -212,8 +212,8 @@ setup: - match: {hits.total.value: 0} --- "nested kNN search inner_hits size > 1": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - do: @@ -321,8 +321,8 @@ setup: - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } --- "nested kNN query search with filter that might match nested docs": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'bugfix for matching non-nested docs in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml index 0ea24686ff839..28ecd8ef59c02 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -1,9 +1,9 @@ # test how knn query interact with other queries setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - features: close_to + test_runner_features: close_to - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml index 23c6b62842e9f..9716762a131b7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: '[k] and [num_candidates] were made optional for kNN search in 8.13.0' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 4a884b644c6a7..02962e049e267 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: '[k] and [num_candidates] were made optional for kNN query in 8.13.0' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml index 71f65220eba1e..44f17e2269027 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'hex encoding for byte vectors was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml index 
9f850400a09cd..e01f3ec18b8c3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'hex encoding for byte vectors was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 8471bd8cb5a9a..7f0c24e217d14 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' - do: indices.create: @@ -61,8 +61,8 @@ setup: --- "kNN search only": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -82,8 +82,8 @@ setup: - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} --- "kNN multi-field search only": - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: 'multi-field kNN search added to search endpoint in 8.7' - do: search: @@ -101,8 +101,8 @@ setup: - match: {hits.hits.1.fields.name.0: "moose.jpg"} --- "kNN search plus query": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -128,8 +128,8 @@ setup: - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} --- "kNN multi-field search with query": - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: 'multi-field kNN search added to 
search endpoint in 8.7' - do: search: @@ -153,8 +153,8 @@ setup: - match: {hits.hits.2.fields.name.0: "moose.jpg"} --- "kNN search with filter": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -194,8 +194,8 @@ setup: --- "kNN search with explicit search_type": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: catch: bad_request @@ -238,10 +238,10 @@ setup: --- "kNN search with filter in _knn_search endpoint": - - skip: - version: ' - 8.1.99' + - requires: + cluster_features: "gte_v8.2.0" reason: 'kNN with filtering added in 8.2' - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: allowed_warnings: - "The kNN search API has been replaced by the `knn` option in the search API." @@ -284,8 +284,8 @@ setup: --- "Test nonexistent field": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: catch: bad_request @@ -303,8 +303,8 @@ setup: --- "KNN Vector similarity search only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -324,8 +324,8 @@ setup: - match: {hits.hits.0.fields.name.0: "moose.jpg"} --- "Vector similarity with filter only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -361,10 +361,10 @@ setup: - length: {hits.hits: 0} --- "Knn search with mip": - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'mip similarity added in 8.11' - features: close_to + test_runner_features: "close_to" - do: indices.create: @@ -450,10 +450,10 @@ setup: - close_to: {hits.hits.0._score: {value: 33686.29, error: 0.01}} --- "Knn search with _name": - - skip: - version: ' - 8.14.99' + - 
requires: + cluster_features: "gte_v8.15.0" reason: 'support for _name in knn was added in 8.15' - features: close_to + test_runner_features: "close_to" - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml index b1933ebde297d..842f71068a34b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' - do: indices.create: @@ -96,10 +96,10 @@ setup: --- "kNN search only regular query": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - features: close_to + test_runner_features: "close_to" - do: search: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index b61bc939f8f88..e6c669ef7b534 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: @@ -394,8 +394,8 @@ setup: type: int8_hnsw --- "Test create, merge, and search cosine": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: 
@@ -467,8 +467,8 @@ setup: - match: { hits.hits.2._id: "3"} --- "Test create, merge, and search dot_product": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml index 7da00a02d4285..1b439967ba163 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'kNN flat index added in 8.13' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml index 81d49dad21a70..880b8cab39684 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'kNN int8_flat index added in 8.13' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml index ea21bb69a77b8..983ac2719e71b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.5.99' + - requires: + cluster_features: "gte_v8.6.0" reason: 'byte-sized kNN search added in 8.6' - do: @@ -164,8 +164,8 @@ setup: --- "Vector similarity search only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -185,8 +185,8 @@ setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} --- "Vector similarity with filter only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml index 854543f7b2144..db0437637fc20 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: headers - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' + test_runner_features: "headers" - do: indices.create: index: futest @@ -50,10 +50,10 @@ setup: --- "Field usage": - - skip: - version: ' - 8.0.99' + - requires: + cluster_features: "gte_v8.1.0" reason: 'dense_vector field usage was added in 8.1' - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: allowed_warnings: - "The kNN search API has been replaced by the `knn` option in the search API." 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml index 545953d2645da..567d338da142c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml @@ -1,11 +1,11 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'Dynamic mapping of floats to dense_vector was added in 8.11' --- "Fields indexed as strings won't be transformed into dense_vector": - - skip: - version: ' - 8.11.0' + - requires: + cluster_features: "gte_v8.11.1" reason: 'Bug fix was added in 8.11.1' - do: index: @@ -572,8 +572,8 @@ setup: --- "Fields mapped as dense_vector without dims or docs have correct cluster stats values": - - skip: - version: ' - 8.11.1' + - requires: + cluster_features: "gte_v8.11.2" reason: 'Bug fix was added in 8.11.2' - do: @@ -603,8 +603,8 @@ setup: --- "Fields mapped as dense_vector have correct cluster stats min max values": - - skip: - version: ' - 8.11.1' + - requires: + cluster_features: "gte_v8.11.2" reason: 'Bug fix was added in 8.11.2' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml index 0672e27b43c67..4dcfa58e79830 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.4.99' + - requires: + cluster_features: "gte_v8.5.0" reason: 'filtered alias for kNN 
search added in 8.5' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 407313a59c5e8..0238a1781d278 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'dense_vector indexed by default was added in 8.11' --- @@ -123,8 +123,8 @@ setup: ef_construction: 200 --- "Default index options for dense_vector": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' - do: indices.create: @@ -149,8 +149,8 @@ setup: - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } --- "Default index options for dense_vector element type byte": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml index 5048bc8d4307c..67819881f1b50 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: Collapse with rescore added in 8.15.0 - 
do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index a778fceee9476..0ae00dff6ce63 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -58,8 +58,7 @@ setup: --- "pre_filter_shard_size with shards that have no hit": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/92058" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/92058" - do: index: index: index_1 @@ -240,8 +239,8 @@ setup: --- "prefilter on non-indexed date fields": - - skip: - version: "- 8.0.99" + - requires: + cluster_features: "gte_v8.1.0" reason: "doc values search was added in 8.1.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml index 3d0e4347fef6a..40ea75b81d59e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml @@ -1242,8 +1242,10 @@ setup: --- "Test exists query on text field with empty values": - skip: - version: '8.4.0 - 8.5.0' - reason: Regression introduced in 8.4.0, fixed in 8.5.1 + known_issues: + - cluster_feature: "gte_v8.4.0" + fixed_by: "gte_v8.5.1" + reason: "Regression introduced in 8.4.0, fixed in 8.5.1" - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index fd3d31f8245ea..52b55098ec4db 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -298,8 +298,8 @@ - is_false: hits.hits.0.fields.count_without_dv --- Test unmapped field: - - skip: - version: ' - 7.10.99' + - requires: + cluster_features: "gte_v7.11.0" reason: support was introduced in 7.11 - do: indices.create: @@ -364,8 +364,8 @@ Test unmapped field: - some other text --- Test unmapped fields inside disabled objects: - - skip: - version: ' - 7.10.99' + - requires: + cluster_features: "gte_v7.11.0" reason: support was introduced in 7.11 - do: indices.create: @@ -405,8 +405,8 @@ Test unmapped fields inside disabled objects: - b --- Test nested fields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -479,8 +479,8 @@ Test nested fields: - is_false: hits.hits.0.fields --- Test nested field inside object structure: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -594,8 +594,8 @@ Test nested field inside object structure: hits.hits.1.fields.obj\.products.1: { "manufacturer" : ["RealTec"]} --- Test doubly nested fields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -663,8 +663,8 @@ Test doubly nested fields: --- Test nested fields with unmapped subfields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -730,8 +730,8 @@ Test nested fields with unmapped subfields: hits.hits.0.fields.user.0: { "address.city" : ["Berlin"]} --- Test nested fields with ignored subfields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 
7.12 - do: indices.create: @@ -773,8 +773,8 @@ Test nested fields with ignored subfields: - { "first" : [ "John" ] } --- Test nested field with sibling field resolving to DocValueFetcher: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -824,8 +824,8 @@ Test nested field with sibling field resolving to DocValueFetcher: hits.hits.0.fields.products.1: { "manufacturer" : ["HyperSmart"]} --- "Test ignores malformed values while returning valid ones": - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: 'Behaviour changed in 7.12' - do: indices.create: @@ -859,8 +859,8 @@ Test nested field with sibling field resolving to DocValueFetcher: --- Test token_count inside nested field doesn't fail: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: 'fix introduced in 7.12.0' - do: indices.create: @@ -897,8 +897,8 @@ Test token_count inside nested field doesn't fail: --- error includes field name: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -934,8 +934,8 @@ error includes field name: --- error includes glob pattern: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -972,8 +972,8 @@ error includes glob pattern: --- error for flattened includes whole path: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -1011,8 +1011,8 @@ error for flattened includes whole path: --- test fetching metadata fields: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'fetching metadata via fields introduced in 8.0' - do: @@ -1123,8 +1123,8 @@ fetch geo_point: --- "Test with subobjects: false": - - skip: - version: ' - 8.9.99' + - requires: + cluster_features: 
"gte_v8.10.0" reason: 'https://github.com/elastic/elasticsearch/issues/96700 fixed in 8.10.0' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml index 22f1e08ff5c29..455d06ba2a984 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml @@ -48,8 +48,8 @@ --- "binary synthetic source": - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: synthetic source support introduced in 8.15 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 7625f19557e9b..dc79961ae78cd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -21,8 +21,8 @@ setup: --- fetch fields: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -56,8 +56,8 @@ fetch fields: --- fetch source: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -87,8 +87,8 @@ fetch source: --- fetch nested source: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -156,8 +156,8 @@ fetch nested source: --- disabling stored fields removes fetch sub phases: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: fetch profiling implemented in 7.16.0 - do: @@ -173,8 +173,8 @@ disabling stored fields removes fetch sub phases: --- dfs knn vector 
profiling: - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: multi-knn dfs profiling implemented in 8.7.0 - do: @@ -237,8 +237,8 @@ dfs knn vector profiling: --- dfs knn vector profiling with vector_operations_count: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: vector_operations_count in dfs profiling added in 8.12.0 - do: @@ -303,8 +303,8 @@ dfs knn vector profiling with vector_operations_count: --- dfs profile for search with dfs_query_then_fetch: - - skip: - version: ' - 8.5.99' + - requires: + cluster_features: "gte_v8.6.0" reason: dfs profiling implemented in 8.6.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml index 34852a7b49624..4795b2096cfa0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml @@ -111,10 +111,10 @@ --- "Test if segments are missing @timestamp field we don't get errors": - - skip: - version: "- 7.99.99" + - requires: + cluster_features: "gte_v8.0.0" reason: "sorting segments was added in 7.16" - features: allowed_warnings + test_runner_features: "allowed_warnings" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml index 355ffeebfb1d3..4c1adc3c6c528 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + 
cluster_features: "gte_v8.15.0" reason: 'no trimming highlight snippets when number_of_fragments is 0 was introduced in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml index d5f8eb4b0762d..2b309f502f0c2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml @@ -144,8 +144,7 @@ fetch _seq_no via fields: --- fetch fields with none stored_fields: - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/107466" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/107466" - do: catch: "bad_request" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml index 52e80887f6b95..d4cf3ade2aa4e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ingest simulate added in 8.12' --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index 3af4c1ff90394..322148f4e82ec 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -1,9 +1,10 @@ --- setup: - skip: - version: "8.7.00 
- 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - --- add time series mappings: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml index 920111fafb07b..c5fe17b251d84 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml index 7efb5f5e56926..485b5b1796ec4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml @@ -1,7 +1,9 @@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml index 4192bdf0cf2fb..5dbd0682947c2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.5.0" + reason: "metric params only on time series indexes introduced in 8.5.0" - skip: - version: " - 8.4.99,8.7.00 - 8.9.99" - reason: "metric params only on time series indexes introduced in 8.5.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml index 9f9d59317454b..5b90dcb705dba 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml @@ -1,7 +1,9 @@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index b710f6b313da0..ade153d284548 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -1,7 +1,9 
@@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 621906820e4ad..973832cf3ca73 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: _tsid hashing introduced in 8.13 and tsid routing changed in 8.14 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml index 962926ca81fad..3c76653960386 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "_tsid hashing introduced in 8.13" - skip: - version: " - 8.1.99,8.7.00 - 8.12.99" - reason: _tsid hashing introduced in 8.13 + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.13.0" + reason: "_tsid hashing introduced in 8.13" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml index 5c5dc02ad4d09..9b1783b852a9f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml @@ -1,8 +1,13 @@ --- setup: + 
- requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml index 12b29f68050bd..c32d3c50b0784 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml @@ -1,9 +1,14 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" + test_runner_features: "arbitrary_key" - skip: - version: " - 8.1.99,8.7.00 - 8.12.99" - reason: _tsid hashing introduced in 8.13 - features: "arbitrary_key" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.13.0" + reason: "_tsid hashing introduced in 8.13" # Force allocating all shards to a single node so that we can shrink later. 
# In production you can move the shards to the single node after they've been diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index 5f1368abcf436..976ac8f08f795 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml index d9a0f65f36170..81be6f82d8a14 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -2,8 +2,7 @@ "Metadata Fields": - skip: - version: "all" - reason: "Update doesn't return metadata fields, waiting for #3259" + awaits_fix: "Update doesn't return metadata fields, waiting for #3259" - do: indices.create: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 97f052367fbc6..7b26cc5edf1bc 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -14,8 +14,10 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; @@ -38,12 +40,29 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { + public static class TestAPMInternalSettings extends Plugin { + @Override + public List> getSettings() { + return List.of( + Setting.timeSetting("telemetry.agent.metrics_interval", TimeValue.timeValueSeconds(0), Setting.Property.NodeScope) + ); + } + } + @Override protected Collection> nodePlugins() { - return List.of(TestTelemetryPlugin.class); + return List.of(TestTelemetryPlugin.class, TestAPMInternalSettings.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put("telemetry.agent.metrics_interval", TimeValue.timeValueSeconds(0)) // disable metrics cache refresh delay + .build(); } - public void testNodeIndexingMetricsArePublishing() throws Exception { + public void testNodeIndexingMetricsArePublishing() { final String dataNode = internalCluster().startNode(); ensureStableCluster(1); @@ -74,107 +93,108 @@ public void testNodeIndexingMetricsArePublishing() throws Exception { // simulate async apm `polling` call for metrics 
plugin.collect(); - assertBusy(() -> { - var indexingTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); - assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); + var indexingTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); + assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); - var indexingCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); - assertThat(indexingCurrent.getLong(), equalTo(0L)); + var indexingCurrent = getSingleRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); + assertThat(indexingCurrent.getLong(), equalTo(0L)); - var indexingFailedTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); - assertThat(indexingFailedTotal.getLong(), equalTo(0L)); + var indexingFailedTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); + assertThat(indexingFailedTotal.getLong(), equalTo(0L)); - var deletionTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); - assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); + var deletionTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); + assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); - var deletionCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); - assertThat(deletionCurrent.getLong(), equalTo(0L)); + var deletionCurrent = getSingleRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); + assertThat(deletionCurrent.getLong(), equalTo(0L)); - var indexingTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); - assertThat(indexingTime.getLong(), greaterThan(0L)); + var indexingTime = 
getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); + assertThat(indexingTime.getLong(), greaterThan(0L)); - var deletionTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); - assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); + var deletionTime = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); + assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); - var throttleTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.throttle.time"); - assertThat(throttleTime.getLong(), equalTo(0L)); + var throttleTime = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.throttle.time"); + assertThat(throttleTime.getLong(), equalTo(0L)); - var noopTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); - assertThat(noopTotal.getLong(), equalTo(0L)); + var noopTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); + assertThat(noopTotal.getLong(), equalTo(0L)); - var coordinatingOperationsSize = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.size" - ); - assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); - - var coordinatingOperationsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.total" - ); - // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks - assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + var coordinatingOperationsSize = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.size" + ); + assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); - var coordinatingOperationsCurrentSize = getRecordedMetric( - plugin::getLongGaugeMeasurement, - 
"es.indexing.coordinating_operations.current.size" - ); - assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); + var coordinatingOperationsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); - var coordinatingOperationsCurrentTotal = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.coordinating_operations.current.total" - ); - assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); + var coordinatingOperationsCurrentSize = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.size" + ); + assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); - var coordinatingOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.rejections.total" - ); - assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); + var coordinatingOperationsCurrentTotal = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.total" + ); + assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); - var coordinatingOperationsRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.coordinating_operations.rejections.ratio" - ); - assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + var coordinatingOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); - var primaryOperationsSize = 
getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); - assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); + var coordinatingOperationsRejectionsRatio = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.requests.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsRejectionsRatio.getLong(), equalTo((long) docsCount + deletesCount)); - var primaryOperationsTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total"); - // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks - assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + var primaryOperationsSize = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); + assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); - var primaryOperationsCurrentSize = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.primary_operations.current.size" - ); - assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); + var primaryOperationsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); - var primaryOperationsCurrentTotal = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.primary_operations.current.total" - ); - assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); + var primaryOperationsCurrentSize = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.size" + ); + 
assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); - var primaryOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.primary_operations.rejections.total" - ); - assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); + var primaryOperationsCurrentTotal = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.total" + ); + assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); - var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.document.rejections.ratio" - ); - assertThat(primaryOperationsDocumentRejectionsRatio.getDouble(), equalTo(0.0)); + var primaryOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.rejections.total" + ); + assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); - }); + var primaryOperationsDocumentRejectionsRatio = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.document.rejections.total" + ); + assertThat(primaryOperationsDocumentRejectionsRatio.getLong(), equalTo(0L)); } - public void testCoordinatingRejectionMetricsArePublishing() throws Exception { + public void testCoordinatingRejectionMetricsArePublishing() { // lower Indexing Pressure limits to trigger coordinating rejections final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB")); @@ -200,23 +220,91 @@ public void testCoordinatingRejectionMetricsArePublishing() throws Exception { // simulate async apm `polling` call for metrics plugin.collect(); - // this bulk request is too big to pass coordinating limit check - assertBusy(() -> { - var coordinatingOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - 
"es.indexing.coordinating_operations.rejections.total" - ); - assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + // this bulk request is too big to pass coordinating limit check, it has to be reported towards `rejections` total metric + var coordinatingOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); - var coordinatingOperationsRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.coordinating_operations.rejections.ratio" - ); - assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(1.0)); - }); + // `requests` metric should remain to `0` + var coordinatingOperationsRequestsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.requests.total" + ); + assertThat(coordinatingOperationsRequestsTotal.getLong(), equalTo(0L)); } - public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception { + public void testCoordinatingRejectionMetricsSpiking() throws Exception { + + // lower Indexing Pressure limits to trigger coordinating rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB")); + ensureStableCluster(1); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + assertAcked(prepareCreate("test").get()); + + // simulate steady processing of bulk requests + // every request should pass thru coordinating limit check + int successfulBulkCount = randomIntBetween(10, 200); + for (int bulk = 0; bulk < successfulBulkCount; bulk++) { + final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode)); + final int 
batchSize = randomIntBetween(1, 5); + for (int i = 0; i < batchSize; i++) { + bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(10))); + } + BulkResponse bulkResponse = bulkRequestBuilder.get(); + assertFalse(bulkResponse.hasFailures()); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + // assert no rejections were reported + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.rejections.total") + .getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.requests.total").getLong(), + equalTo((long) successfulBulkCount) + ); + + // simulate spike of rejected coordinating operations after steady processing + int rejectedBulkCount = randomIntBetween(1, 20); + for (int bulk = 0; bulk < rejectedBulkCount; bulk++) { + final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode)); + final int batchSize = randomIntBetween(100, 1000); + for (int i = 0; i < batchSize; i++) { + bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(100))); + } + // big batch should not pass thru coordinating limit check + expectThrows(EsRejectedExecutionException.class, bulkRequestBuilder); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.rejections.total") + .getLong(), + equalTo((long) rejectedBulkCount) + ); + // number of successfully processed coordinating requests should remain as seen before + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.requests.total").getLong(), + equalTo((long) successfulBulkCount) + ); + + } + + public void testPrimaryDocumentRejectionMetricsArePublishing() { // setting low Indexing 
Pressure limits to trigger primary rejections final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "2KB").build()); @@ -264,34 +352,119 @@ public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception plugin.collect(); // this bulk request is too big to pass coordinating limit check - assertBusy(() -> { - var primaryOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.primary_operations.rejections.total" + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.rejections.total").getLong(), + equalTo((long) numberOfShards) + ); + + // all unsuccessful indexing operations (aka documents) should be reported towards `.document.rejections.total` metric + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.document.rejections.total") + .getLong(), + equalTo((long) batchCountOne) + ); + + // all successful indexing operations (aka documents) should be reported towards `.primary_operations.total` metric + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total").getLong(), + equalTo((long) batchCountTwo) + ); + } + + public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exception { + + // setting low Indexing Pressure limits to trigger primary rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "4KB").build()); + // setting high Indexing Pressure limits to pass coordinating checks + final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( + Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "100MB").build() + ); + ensureStableCluster(2); + + // for simplicity do not mix small and big documents in single index/shard + assertAcked(prepareCreate("test-index-one", 
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); + assertAcked(prepareCreate("test-index-two", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + // how many times are we going to gauge metrics + // simulate time flow and assert that results of previous calls do not impact current metric readings + int numberOfMetricCollectionRounds = randomIntBetween(2, 10); + logger.debug("--> running {} rounds of gauging metrics", numberOfMetricCollectionRounds); + + // to simulate cumulative property of underneath metric counters + int prevRejectedDocumentsNumber = 0; + int prevAcceptedDocumentsNumber = 0; + + for (int i = 0; i < numberOfMetricCollectionRounds; i++) { + + final BulkRequest bulkRequestOne = new BulkRequest(); + + // construct bulk request of small and big documents (big are not supposed to pass thru a primary memory limit gate) + int acceptedDocumentsNumber = randomIntBetween(1, 5); + for (int j = 0; j < acceptedDocumentsNumber; j++) { + bulkRequestOne.add(new IndexRequest("test-index-one").source("field", randomAlphaOfLength(1))); + } + + final BulkRequest bulkRequestTwo = new BulkRequest(); + int rejectedDocumentsNumber = randomIntBetween(1, 20); + for (int j = 0; j < rejectedDocumentsNumber; j++) { + bulkRequestTwo.add(new IndexRequest("test-index-two").source("field", randomAlphaOfLength(5120))); + } + + logger.debug("--> round: {}, small docs: {}, big docs: {}", i, acceptedDocumentsNumber, rejectedDocumentsNumber); + + // requests are sent thru coordinating node + + final BulkResponse bulkResponseOne = client(coordinatingNode).bulk(bulkRequestOne).actionGet(); + assertThat(bulkResponseOne.hasFailures(), equalTo(false)); + + final BulkResponse bulkResponseTwo = 
client(coordinatingNode).bulk(bulkRequestTwo).actionGet(); + assertThat(bulkResponseTwo.hasFailures(), equalTo(true)); + assertThat( + Arrays.stream(bulkResponseTwo.getItems()).filter(r -> r.status() == RestStatus.TOO_MANY_REQUESTS).count(), + equalTo((long) rejectedDocumentsNumber) ); - assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); - var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.document.rejections.ratio" + // simulate async apm `polling` call for metrics + plugin.collect(); + + // all unsuccessful indexing operations (aka documents) should be reported towards `.document.rejections.total` metric + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.document.rejections.total") + .getLong(), + equalTo((long) rejectedDocumentsNumber + prevRejectedDocumentsNumber) ); - // ratio of rejected documents vs all indexing documents + prevRejectedDocumentsNumber += rejectedDocumentsNumber; + + // all successful indexing operations (aka documents) should be reported towards `.primary_operations.total` metric assertThat( - equals(primaryOperationsDocumentRejectionsRatio.getDouble(), (double) batchCountOne / (batchCountOne + batchCountTwo)), - equalTo(true) + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total").getLong(), + equalTo((long) acceptedDocumentsNumber + prevAcceptedDocumentsNumber) ); - }); + prevAcceptedDocumentsNumber += acceptedDocumentsNumber; + } } - private static Measurement getRecordedMetric(Function> metricGetter, String name) { + private static Measurement getSingleRecordedMetric(Function> metricGetter, String name) { final List measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); assertThat(measurements.size(), equalTo(1)); return measurements.get(0); } - 
private static boolean equals(double expected, double actual) { + private static Measurement getLatestRecordedMetric(Function> metricGetter, String name) { + final List measurements = metricGetter.apply(name); + assertFalse("Indexing metric is not recorded", measurements.isEmpty()); + return measurements.get(measurements.size() - 1); + } + + private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return Math.abs(expected - actual) < eps; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java new file mode 100644 index 0000000000000..7e4ae040caeca --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.nodescapabilities; + +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { + + public void testNodesCapabilities() throws IOException { + internalCluster().startNodes(2); + + ClusterHealthResponse clusterHealth = clusterAdmin().prepareHealth().setWaitForGreenStatus().setWaitForNodes("2").get(); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); + + // check we support the capabilities API itself. Which we do. 
+ NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(true)); + + // check we support some parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(true)); + + // check we don't support some other parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(false)); + + // check we don't support a random invalid api + // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 + /*response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(false));*/ + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java index 1f8d55516d508..b7a1dc12406d2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java @@ -215,10 +215,9 @@ public Settings onNodeStopped(String nodeName) throws Exception { } } - private Tuple setupClusterStateListenerForError(String node) { + private CountDownLatch setupClusterStateListenerForError(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new 
CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { @@ -231,13 +230,16 @@ public void clusterChanged(ClusterChangedEvent event) { containsString("Missing handler definition for content key [not_cluster_settings]") ); clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); } } }); - return new Tuple<>(savedClusterState, metadataVersion); + // we need this after we setup the listener above, in case the node started and processed + // settings before we set our listener to cluster state changes. + causeClusterStateUpdate(); + + return savedClusterState; } private void writeFileSettings(String json) throws Exception { @@ -269,22 +271,49 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertMasterNode(internalCluster().nonMasterClient(), masterNode); var savedClusterState = setupClusterStateListenerForError(masterNode); - // we need this after we setup the listener above, in case the node started and processed - // settings before we set our listener to cluster state changes. 
- causeClusterStateUpdate(); - FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); assertTrue(masterFileSettingsService.watching()); assertFalse(dataFileSettingsService.watching()); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); ReadinessService s = internalCluster().getInstance(ReadinessService.class, internalCluster().getMasterName()); assertNull(s.boundAddress()); } + public void testReadyAfterRestartWithBadFileSettings() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); + + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + String masterNode = internalCluster().startMasterOnlyNode(); + + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + assertBusy(() -> assertTrue("master node ready", internalCluster().getInstance(ReadinessService.class, masterNode).ready())); + assertBusy(() -> assertTrue("data node ready", internalCluster().getInstance(ReadinessService.class, dataNode).ready())); + + logger.info("--> stop master node"); + Settings masterDataPathSettings = internalCluster().dataPathSettings(internalCluster().getMasterName()); + internalCluster().stopCurrentMasterNode(); + expectMasterNotFound(); + + logger.info("--> write bad file settings before restarting master node"); + writeFileSettings(testErrorJSON); + + logger.info("--> restart master node"); + String nextMasterNode = internalCluster().startNode(Settings.builder().put(nonDataNode(masterNode())).put(masterDataPathSettings)); + + assertMasterNode(internalCluster().nonMasterClient(), nextMasterNode); + + var savedClusterState = setupClusterStateListenerForError(nextMasterNode); + 
assertTrue(savedClusterState.await(20, TimeUnit.SECONDS)); + + assertTrue("master node ready on restart", internalCluster().getInstance(ReadinessService.class, nextMasterNode).ready()); + } + public void testReadyWhenMissingFileSettings() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 475158c7a8709..e6b944262094d 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -65,6 +65,7 @@ exports org.elasticsearch.action.admin.cluster.desirednodes; exports org.elasticsearch.action.admin.cluster.health; exports org.elasticsearch.action.admin.cluster.migration; + exports org.elasticsearch.action.admin.cluster.node.capabilities; exports org.elasticsearch.action.admin.cluster.node.hotthreads; exports org.elasticsearch.action.admin.cluster.node.info; exports org.elasticsearch.action.admin.cluster.node.reload; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f9ab7944714a4..db43a12cf9014 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -192,6 +192,11 @@ static TransportVersion def(int id) { public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = def(8_649_00_0); public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0); public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); + public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); + public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); + public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); + public 
static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index d07717857169b..21f3df2ab7175 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -31,17 +31,94 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A listener for action responses or failures. + *

+ * Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code + * which doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become + * available. They support several useful control flows: + *

+ *
+ * <ul>
+ * <li>They can be completed immediately on the calling thread.</li>
+ * <li>They can be completed concurrently on a different thread.</li>
+ * <li>They can be stored in a data structure and completed later on when the system reaches a particular state.</li>
+ * <li>Most commonly, they can be passed on to other methods that themselves require a callback.</li>
+ * <li>They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run
+ * before or after completion, before passing them on.</li>
+ * </ul>
+ *

+ * {@link ActionListener} is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. {@link + * ActionListener} is used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes + * it easier to compose parts of the system together without needing to build adapters to convert back and forth between different kinds of + * callback. It also makes it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely + * takes practice and is certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with {@link + * ActionListener} instances themselves, creating new instances out of existing ones and completing them in interesting ways. See for + * instance: + *

+ *
+ * <ul>
+ * <li>All the static methods on {@link ActionListener} itself.</li>
+ * <li>{@link org.elasticsearch.action.support.ThreadedActionListener} for forking work elsewhere.</li>
+ * <li>{@link org.elasticsearch.action.support.RefCountingListener} for running work in parallel.</li>
+ * <li>{@link org.elasticsearch.action.support.SubscribableListener} for constructing flexible workflows.</li>
+ * </ul>
+ *

+ * Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous + * code without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too + * expensive to waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means + * that most of our code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes + * a callback. The entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at {@link + * org.elasticsearch.rest.BaseRestHandler}{@code #prepareRequest} and transport APIs all start at {@link + * org.elasticsearch.action.support.TransportAction}{@code #doExecute} and the whole system fundamentally works in terms of an event loop + * (an {@code io.netty.channel.EventLoop}) which processes network events via callbacks. + *

+ *

+ * {@link ActionListener} is not an ad-hoc invention. Formally speaking, it is our implementation of the general concept of a + * continuation in the sense of continuation-passing style + * (CPS): an extra argument to a function which defines how to continue the computation when the result is available. This is in contrast to + * direct style which is the more usual style of calling methods that return values directly back to the caller so they can continue + * executing as normal. There's essentially two ways that computation can continue in Java (it can return a value or it can throw an + * exception) which is why {@link ActionListener} has both an {@link #onResponse} and an {@link #onFailure} method. + *

+ *

+ * CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS + * also enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in + * parallel, perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be + * satisfied before proceeding (e.g. {@link org.elasticsearch.action.support.SubscribableListener} amongst many others). Some languages have + * first-class support for continuations (e.g. the {@code async} and {@code await} primitives in C#) allowing the programmer to write code + * in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all the callbacks + * ourselves. + *

+ *

+ * Strictly speaking, CPS requires that a computation only continues by calling the continuation. In Elasticsearch, this means that + * asynchronous methods must have {@code void} return type and may not throw any exceptions. This is mostly the case in our code as written + * today, and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In + * particular, it's not uncommon to permit some methods to throw an exception, using things like {@link ActionListener#run} (or an + * equivalent {@code try ... catch ...} block) further up the stack to handle it. Some methods also take (and may complete) an {@link + * ActionListener} parameter, but still return a value separately for other local synchronous work. + *

+ *

+ * This pattern is often used in the transport action layer with the use of the {@link + * org.elasticsearch.action.support.ChannelActionListener} class, which wraps a {@link org.elasticsearch.transport.TransportChannel} + * produced by the transport layer.{@link org.elasticsearch.transport.TransportChannel} implementations can hold a reference to a Netty + * channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, + * so a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, + * barring caller timeouts. + *

+ *

+ * Note that we explicitly avoid {@link java.util.concurrent.CompletableFuture} and other similar mechanisms as much as possible. They + * can achieve the same goals as {@link ActionListener}, but can also easily be misused in various ways that lead to severe bugs. In + * particular, futures support blocking while waiting for a result, but this is almost never appropriate in Elasticsearch's production code + * where threads are such a precious resource. Moreover if something throws an {@link Error} then the JVM should exit pretty much straight + * away, but {@link java.util.concurrent.CompletableFuture} can catch an {@link Error} which delays the JVM exit until its result is + * observed. This may be much later, or possibly even never. It's not possible to introduce such bugs when using {@link ActionListener}. + *

*/ public interface ActionListener { /** - * Handle action response. This response may constitute a failure or a - * success but it is up to the listener to make that decision. + * Complete this listener with a successful (or at least, non-exceptional) response. */ void onResponse(Response response); /** - * A failure caused by an exception at some phase of the task. + * Complete this listener with an exceptional response. */ void onFailure(Exception e); diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index ef73d0470b43e..ab93f98c5648b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeAction; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.cluster.migration.TransportPostFeatureUpgradeAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; @@ -284,6 +285,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestGetStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestGetTaskAction; import org.elasticsearch.rest.action.admin.cluster.RestListTasksAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction; @@ -616,6 
+618,7 @@ public void reg actions.register(TransportNodesInfoAction.TYPE, TransportNodesInfoAction.class); actions.register(TransportRemoteInfoAction.TYPE, TransportRemoteInfoAction.class); + actions.register(TransportNodesCapabilitiesAction.TYPE, TransportNodesCapabilitiesAction.class); actions.register(RemoteClusterNodesAction.TYPE, RemoteClusterNodesAction.TransportAction.class); actions.register(TransportNodesStatsAction.TYPE, TransportNodesStatsAction.class); actions.register(TransportNodesUsageAction.TYPE, TransportNodesUsageAction.class); @@ -833,6 +836,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClearVotingConfigExclusionsAction()); registerHandler.accept(new RestNodesInfoAction(settingsFilter)); registerHandler.accept(new RestRemoteClusterInfoAction()); + registerHandler.accept(new RestNodesCapabilitiesAction()); registerHandler.accept(new RestNodesStatsAction()); registerHandler.accept(new RestNodesUsageAction()); registerHandler.accept(new RestNodesHotThreadsAction()); @@ -1029,6 +1033,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< @Override protected void configure() { + bind(RestController.class).toInstance(restController); bind(ActionFilters.class).toInstance(actionFilters); bind(DestructiveOperations.class).toInstance(destructiveOperations); bind(new TypeLiteral>() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java new file mode 100644 index 0000000000000..c26aa673d13fd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class NodeCapability extends BaseNodeResponse { + + private final boolean supported; + + public NodeCapability(StreamInput in) throws IOException { + super(in); + + supported = in.readBoolean(); + } + + public NodeCapability(boolean supported, DiscoveryNode node) { + super(node); + this.supported = supported; + } + + public boolean isSupported() { + return supported; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeBoolean(supported); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java new file mode 100644 index 0000000000000..c69d273727238 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestRequest; + +import java.util.Set; + +public class NodesCapabilitiesRequest extends BaseNodesRequest { + + private RestRequest.Method method = RestRequest.Method.GET; + private String path = "/"; + private Set parameters = Set.of(); + private Set capabilities = Set.of(); + private RestApiVersion restApiVersion = RestApiVersion.current(); + + public NodesCapabilitiesRequest() { + // always send to all nodes + super(Strings.EMPTY_ARRAY); + } + + public NodesCapabilitiesRequest path(String path) { + this.path = path; + return this; + } + + public String path() { + return path; + } + + public NodesCapabilitiesRequest method(RestRequest.Method method) { + this.method = method; + return this; + } + + public RestRequest.Method method() { + return method; + } + + public NodesCapabilitiesRequest parameters(String... parameters) { + this.parameters = Set.of(parameters); + return this; + } + + public Set parameters() { + return parameters; + } + + public NodesCapabilitiesRequest capabilities(String... 
capabilities) { + this.capabilities = Set.of(capabilities); + return this; + } + + public Set capabilities() { + return capabilities; + } + + public NodesCapabilitiesRequest restApiVersion(RestApiVersion restApiVersion) { + this.restApiVersion = restApiVersion; + return this; + } + + public RestApiVersion restApiVersion() { + return restApiVersion; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java new file mode 100644 index 0000000000000..63fdb9f7da08a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class NodesCapabilitiesResponse extends BaseNodesResponse implements ToXContentFragment { + protected NodesCapabilitiesResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return TransportAction.localOnly(); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + TransportAction.localOnly(); + } + + public boolean isSupported() { + return getNodes().isEmpty() == false && getNodes().stream().allMatch(NodeCapability::isSupported); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field("supported", isSupported()); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..7e392775bf42e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +public class TransportNodesCapabilitiesAction extends TransportNodesAction< + NodesCapabilitiesRequest, + NodesCapabilitiesResponse, + TransportNodesCapabilitiesAction.NodeCapabilitiesRequest, + NodeCapability> { + + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); + + private final RestController restController; + + @Inject + public TransportNodesCapabilitiesAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + RestController restController + ) { + super( + TYPE.name(), + clusterService, + transportService, + actionFilters, + NodeCapabilitiesRequest::new, + threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.restController = restController; 
+ } + + @Override + protected NodesCapabilitiesResponse newResponse( + NodesCapabilitiesRequest request, + List responses, + List failures + ) { + return new NodesCapabilitiesResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeCapabilitiesRequest newNodeRequest(NodesCapabilitiesRequest request) { + return new NodeCapabilitiesRequest( + request.method(), + request.path(), + request.parameters(), + request.capabilities(), + request.restApiVersion() + ); + } + + @Override + protected NodeCapability newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeCapability(in); + } + + @Override + protected NodeCapability nodeOperation(NodeCapabilitiesRequest request, Task task) { + boolean supported = restController.checkSupported( + request.method, + request.path, + request.parameters, + request.capabilities, + request.restApiVersion + ); + return new NodeCapability(supported, transportService.getLocalNode()); + } + + public static class NodeCapabilitiesRequest extends TransportRequest { + private final RestRequest.Method method; + private final String path; + private final Set parameters; + private final Set capabilities; + private final RestApiVersion restApiVersion; + + public NodeCapabilitiesRequest(StreamInput in) throws IOException { + super(in); + + method = in.readEnum(RestRequest.Method.class); + path = in.readString(); + parameters = in.readCollectionAsImmutableSet(StreamInput::readString); + capabilities = in.readCollectionAsImmutableSet(StreamInput::readString); + restApiVersion = RestApiVersion.forMajor(in.readVInt()); + } + + public NodeCapabilitiesRequest( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + this.method = method; + this.path = path; + this.parameters = Set.copyOf(parameters); + this.capabilities = Set.copyOf(capabilities); + this.restApiVersion = restApiVersion; + } + + @Override + public void 
writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeEnum(method); + out.writeString(path); + out.writeCollection(parameters, StreamOutput::writeString); + out.writeCollection(capabilities, StreamOutput::writeString); + out.writeVInt(restApiVersion.major); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index 6ffe7ac390260..70060fc834452 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -786,6 +786,7 @@ static class IndexPressureStats implements ToXContentFragment { long memoryLimit = 0; long totalCoordinatingOps = 0; + long totalCoordinatingRequests = 0; long totalPrimaryOps = 0; long totalReplicaOps = 0; long currentCoordinatingOps = 0; @@ -813,6 +814,7 @@ static class IndexPressureStats implements ToXContentFragment { currentPrimaryOps += nodeStatIndexingPressureStats.getCurrentPrimaryOps(); currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); + totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests(); } } indexingPressureStats = new IndexingPressureStats( @@ -834,7 +836,8 @@ static class IndexPressureStats implements ToXContentFragment { currentCoordinatingOps, currentPrimaryOps, currentReplicaOps, - primaryDocumentRejections + primaryDocumentRejections, + totalCoordinatingRequests ); } diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index f2b9c5ef9631e..daae078ed9a68 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ 
b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -21,6 +21,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -248,6 +251,14 @@ public NodesStatsRequestBuilder prepareNodesStats(String... nodesIds) { return new NodesStatsRequestBuilder(this).setNodesIds(nodesIds); } + public ActionFuture nodesCapabilities(final NodesCapabilitiesRequest request) { + return execute(TransportNodesCapabilitiesAction.TYPE, request); + } + + public void nodesCapabilities(final NodesCapabilitiesRequest request, final ActionListener listener) { + execute(TransportNodesCapabilitiesAction.TYPE, request, listener); + } + public void nodesUsage(final NodesUsageRequest request, final ActionListener listener) { execute(TransportNodesUsageAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java index ec8200bf2d701..5df045df4ecd8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java @@ -47,6 +47,8 @@ public record ReservedStateMetadata( ) implements SimpleDiffable, 
ToXContentFragment { public static final Long NO_VERSION = Long.MIN_VALUE; // use min long as sentinel for uninitialized version + public static final Long EMPTY_VERSION = -1L; // use -1 as sentinel for empty metadata + public static final Long RESTORED_VERSION = 0L; // use 0 as sentinel for metadata restored from snapshot private static final ParseField VERSION = new ParseField("version"); private static final ParseField HANDLERS = new ParseField("handlers"); diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java index 817ccde4bad2e..4410ce69ddf54 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -26,6 +26,10 @@ * All feature checks should be done through {@code FeatureService} to ensure that Elasticsearch's * guarantees on the introduction of new functionality are followed; * that is, new functionality is not enabled until all nodes in the cluster support it. + *

+ * Note: {@link FeatureSpecification}s are loaded as service providers, however tests are not fully modularized yet. + * Make sure to also register new specifications in {@code META-INF/services/org.elasticsearch.features.FeatureSpecification}, + * so they are available in tests as well. */ public interface FeatureSpecification { /** diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index 7696cf99b75cd..7f07cdd1c3b1a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -46,6 +46,7 @@ public class IndexingPressure { private final AtomicLong totalReplicaBytes = new AtomicLong(0); private final AtomicLong totalCoordinatingOps = new AtomicLong(0); + private final AtomicLong totalCoordinatingRequests = new AtomicLong(0); private final AtomicLong totalPrimaryOps = new AtomicLong(0); private final AtomicLong totalReplicaOps = new AtomicLong(0); @@ -109,6 +110,7 @@ public Releasable markCoordinatingOperationStarted(int operations, long bytes, b totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); totalCoordinatingBytes.getAndAdd(bytes); totalCoordinatingOps.getAndAdd(operations); + totalCoordinatingRequests.getAndIncrement(); return wrapReleasable(() -> { logger.trace(() -> Strings.format("removing [%d] coordinating operations and [%d] bytes", operations, bytes)); this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); @@ -221,7 +223,8 @@ public IndexingPressureStats stats() { currentCoordinatingOps.get(), currentPrimaryOps.get(), currentReplicaOps.get(), - primaryDocumentRejections.get() + primaryDocumentRejections.get(), + totalCoordinatingRequests.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java index 
270bcd2297a67..a69cc42163dd2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java @@ -42,7 +42,7 @@ public class CombinedDeletionPolicy extends IndexDeletionPolicy { private final TranslogDeletionPolicy translogDeletionPolicy; private final SoftDeletesPolicy softDeletesPolicy; private final LongSupplier globalCheckpointSupplier; - private final Map snapshottedCommits; // Number of snapshots held against each commit point. + private final Map acquiredIndexCommits; // Number of references held against each commit point. interface CommitsListener { @@ -71,7 +71,7 @@ interface CommitsListener { this.softDeletesPolicy = softDeletesPolicy; this.globalCheckpointSupplier = globalCheckpointSupplier; this.commitsListener = commitsListener; - this.snapshottedCommits = new HashMap<>(); + this.acquiredIndexCommits = new HashMap<>(); } @Override @@ -120,7 +120,7 @@ public void onCommit(List commits) throws IOException { } for (int i = 0; i < keptPosition; i++) { final IndexCommit commit = commits.get(i); - if (snapshottedCommits.containsKey(commit) == false) { + if (acquiredIndexCommits.containsKey(commit) == false) { deleteCommit(commit); if (deletedCommits == null) { deletedCommits = new ArrayList<>(); @@ -213,7 +213,7 @@ synchronized IndexCommit acquireIndexCommit(boolean acquiringSafeCommit) { assert safeCommit != null : "Safe commit is not initialized yet"; assert lastCommit != null : "Last commit is not initialized yet"; final IndexCommit snapshotting = acquiringSafeCommit ? 
safeCommit : lastCommit; - snapshottedCommits.merge(snapshotting, 1, Integer::sum); // increase refCount + acquiredIndexCommits.merge(snapshotting, 1, Integer::sum); // increase refCount return wrapCommit(snapshotting); } @@ -224,27 +224,27 @@ protected IndexCommit wrapCommit(IndexCommit indexCommit) { /** * Releases an index commit that acquired by {@link #acquireIndexCommit(boolean)}. * - * @return true if the snapshotting commit can be clean up. + * @return true if the acquired commit can be clean up. */ - synchronized boolean releaseCommit(final IndexCommit snapshotCommit) { - final IndexCommit releasingCommit = ((SnapshotIndexCommit) snapshotCommit).getIndexCommit(); - assert snapshottedCommits.containsKey(releasingCommit) - : "Release non-snapshotted commit;" - + "snapshotted commits [" - + snapshottedCommits + synchronized boolean releaseCommit(final IndexCommit acquiredCommit) { + final IndexCommit releasingCommit = ((SnapshotIndexCommit) acquiredCommit).getIndexCommit(); + assert acquiredIndexCommits.containsKey(releasingCommit) + : "Release non-acquired commit;" + + "acquired commits [" + + acquiredIndexCommits + "], releasing commit [" + releasingCommit + "]"; // release refCount - final Integer refCount = snapshottedCommits.compute(releasingCommit, (key, count) -> { + final Integer refCount = acquiredIndexCommits.compute(releasingCommit, (key, count) -> { if (count == 1) { return null; } return count - 1; }); - assert refCount == null || refCount > 0 : "Number of snapshots can not be negative [" + refCount + "]"; - // The commit can be clean up only if no pending snapshot and it is neither the safe commit nor last commit. + assert refCount == null || refCount > 0 : "Number of references for acquired commit can not be negative [" + refCount + "]"; + // The commit can be clean up only if no refCount and it is neither the safe commit nor last commit. 
return refCount == null && releasingCommit.equals(safeCommit) == false && releasingCommit.equals(lastCommit) == false; } @@ -296,10 +296,10 @@ private static Set listOfNewFileNames(IndexCommit previous, IndexCommit } /** - * Checks whether the deletion policy is holding on to snapshotted commits + * Checks whether the deletion policy is holding on to acquired index commits */ - synchronized boolean hasSnapshottedCommits() { - return snapshottedCommits.isEmpty() == false; + synchronized boolean hasAcquiredIndexCommits() { + return acquiredIndexCommits.isEmpty() == false; } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 69e3be9bb2113..80fed0f3092e5 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -664,8 +664,8 @@ Translog getTranslog() { } // Package private for testing purposes only - boolean hasSnapshottedCommits() { - return combinedDeletionPolicy.hasSnapshottedCommits(); + boolean hasAcquiredIndexCommits() { + return combinedDeletionPolicy.hasAcquiredIndexCommits(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java index b74abe3cc0790..ace891f9aead6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java @@ -50,9 +50,7 @@ import org.elasticsearch.index.translog.TruncateTranslogAction; import java.io.IOException; -import java.io.OutputStream; import java.io.PrintStream; -import java.io.PrintWriter; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -60,6 +58,7 @@ import java.util.Map; import java.util.Objects; 
+import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.common.lucene.Lucene.indexWriterConfigWithNoMerging; public class RemoveCorruptedShardDataCommand extends ElasticsearchNodeCommand { @@ -249,13 +248,7 @@ public void processDataPaths(Terminal terminal, Path[] dataPaths, OptionSet opti throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory"); } - final PrintWriter writer = terminal.getWriter(); - final PrintStream printStream = new PrintStream(new OutputStream() { - @Override - public void write(int b) { - writer.write(b); - } - }, false, "UTF-8"); + final PrintStream printStream = new PrintStream(terminal.asLineOutputStream(UTF_8), false, UTF_8); final boolean verbose = terminal.isPrintable(Terminal.Verbosity.VERBOSE); final Directory indexDirectory = getDirectory(indexPath); diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index 608fa3128bf09..1316776ec39b2 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -37,6 +37,7 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; + private final long totalCoordinatingRequests; private final long totalPrimaryOps; private final long totalReplicaOps; private final long currentCoordinatingOps; @@ -77,6 +78,12 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { primaryDocumentRejections = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + totalCoordinatingRequests = in.readVLong(); + } else { + totalCoordinatingRequests = -1L; + } } public 
IndexingPressureStats( @@ -98,7 +105,8 @@ public IndexingPressureStats( long currentCoordinatingOps, long currentPrimaryOps, long currentReplicaOps, - long primaryDocumentRejections + long primaryDocumentRejections, + long totalCoordinatingRequests ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -121,6 +129,7 @@ public IndexingPressureStats( this.currentReplicaOps = currentReplicaOps; this.primaryDocumentRejections = primaryDocumentRejections; + this.totalCoordinatingRequests = totalCoordinatingRequests; } @Override @@ -146,6 +155,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { out.writeVLong(primaryDocumentRejections); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + out.writeVLong(totalCoordinatingRequests); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -224,6 +237,10 @@ public long getPrimaryDocumentRejections() { return primaryDocumentRejections; } + public long getTotalCoordinatingRequests() { + return totalCoordinatingRequests; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index fa2475921aa93..c682c44b47bab 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -1067,27 +1067,30 @@ public void accept(final IndexShard.ShardFailure shardFailure) 
{ final ShardRouting shardRouting = shardFailure.routing(); threadPool.generic().execute(() -> { synchronized (IndicesClusterStateService.this) { - try { - CloseUtils.executeDirectly( - l -> failAndRemoveShard( - shardRouting, - true, - "shard failure, reason [" + shardFailure.reason() + "]", - shardFailure.cause(), - clusterService.state(), - EsExecutors.DIRECT_EXECUTOR_SERVICE /* NB holding mutex while closing shard, ES-8334 TODO revisit this? */, - l - ) - ); - } catch (Exception e) { - // should not be possible - final var wrappedException = new IllegalStateException( - "unexpected failure in FailedShardHandler on " + shardRouting, - e - ); - logger.error(wrappedException.getMessage(), e); - assert false : e; - } + ActionListener.run(ActionListener.assertOnce(new ActionListener() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + final var wrappedException = new IllegalStateException( + "unexpected failure in FailedShardHandler on " + shardRouting, + e + ); + logger.error(wrappedException.getMessage(), e); + assert false : e; + } + }), + l -> failAndRemoveShard( + shardRouting, + true, + "shard failure, reason [" + shardFailure.reason() + "]", + shardFailure.cause(), + clusterService.state(), + shardCloseExecutor, + l + ) + ); } }); } diff --git a/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java new file mode 100644 index 0000000000000..1c63aea61b7c8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.inference; + +import org.elasticsearch.xcontent.ToXContentObject; + +/** + * Provides a contract for retrieving exposed fields. + */ +public interface FilteredXContent { + /** + * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. + */ + ToXContentObject getFilteredXContentObject(); +} diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 6c1a01acb0dab..b143f74c848c1 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -12,12 +12,7 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; -public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { - - /** - * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. - */ - ToXContentObject getFilteredXContentObject(); +public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable, FilteredXContent { /** * Similarity used in the service. Will be null if not applicable. 
diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 68cbcdb5657f9..c46aa4181bf05 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -19,7 +19,6 @@ import org.elasticsearch.monitor.jvm.GcNames; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.node.NodeService; -import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -529,23 +528,16 @@ private void registerAsyncMetrics(MeterRegistry registry) { ); metrics.add( - registry.registerDoubleGauge( - "es.indexing.coordinating_operations.rejections.ratio", - "Ratio of rejected coordinating operations", - "ratio", - () -> { - var totalCoordinatingOperations = Optional.ofNullable(stats.getOrRefresh()) - .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getTotalCoordinatingOps) - .orElse(0L); - var totalCoordinatingRejections = Optional.ofNullable(stats.getOrRefresh()) + registry.registerLongAsyncCounter( + "es.indexing.coordinating_operations.requests.total", + "Total number of coordinating requests", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getCoordinatingRejections) - .orElse(0L); - // rejections do not count towards `totalCoordinatingOperations` - var totalOps = totalCoordinatingOperations + totalCoordinatingRejections; - return new DoubleWithAttributes(totalOps != 0 ? 
(double) totalCoordinatingRejections / totalOps : 0.0); - } + .map(IndexingPressureStats::getTotalCoordinatingRequests) + .orElse(0L) + ) ) ); @@ -620,23 +612,16 @@ private void registerAsyncMetrics(MeterRegistry registry) { ); metrics.add( - registry.registerDoubleGauge( - "es.indexing.primary_operations.document.rejections.ratio", - "Ratio of rejected primary operations", - "ratio", - () -> { - var totalPrimaryOperations = Optional.ofNullable(stats.getOrRefresh()) - .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getTotalPrimaryOps) - .orElse(0L); - var totalPrimaryDocumentRejections = Optional.ofNullable(stats.getOrRefresh()) + registry.registerLongAsyncCounter( + "es.indexing.primary_operations.document.rejections.total", + "Total number of rejected indexing documents", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) .map(IndexingPressureStats::getPrimaryDocumentRejections) - .orElse(0L); - // primary document rejections do not count towards `totalPrimaryOperations` - var totalOps = totalPrimaryOperations + totalPrimaryDocumentRejections; - return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryDocumentRejections / totalOps : 0.0); - } + .orElse(0L) + ) ) ); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 8719c8cbf8730..f765ee591fb40 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -102,7 +102,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { // We check if the version was reset to 0, and force an update if a file exists. This can happen in situations // like snapshot restores. 
ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(NAMESPACE); - return fileSettingsMetadata != null && fileSettingsMetadata.version() == 0L; + return fileSettingsMetadata != null && fileSettingsMetadata.version().equals(ReservedStateMetadata.RESTORED_VERSION); } /** diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index d2aea19417787..a281db9f02383 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -42,6 +42,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.ExceptionsHelper.stackTrace; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.checkErrorVersion; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.isNewError; @@ -112,7 +113,7 @@ ReservedStateChunk parse(String namespace, XContentParser parser) { try { return stateChunkParser.apply(parser, null); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -134,7 +135,7 @@ public void process(String namespace, XContentParser parser, Consumer try { stateChunk = parse(namespace, parser); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); 
+ ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -148,7 +149,7 @@ public void process(String namespace, XContentParser parser, Consumer } public void initEmpty(String namespace, ActionListener listener) { - var missingVersion = new ReservedStateVersion(-1L, Version.CURRENT); + var missingVersion = new ReservedStateVersion(EMPTY_VERSION, Version.CURRENT); var emptyState = new ReservedStateChunk(Map.of(), missingVersion); updateTaskQueue.submitTask( "empty initial cluster state [" + namespace + "]", diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java index 0be4a7972d05c..1a45a357fe621 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java @@ -18,6 +18,9 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.NO_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.RESTORED_VERSION; import static org.elasticsearch.core.Strings.format; /** @@ -50,8 +53,10 @@ ActionListener listener() { static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion) { return (existingMetadata == null || existingMetadata.errorMetadata() == null - || newStateVersion <= 0 // version will be -1 when we can't even parse the file, it might be 0 on snapshot restore - || existingMetadata.errorMetadata().version() < 
newStateVersion); + || existingMetadata.errorMetadata().version() < newStateVersion + || newStateVersion.equals(RESTORED_VERSION) + || newStateVersion.equals(EMPTY_VERSION) + || newStateVersion.equals(NO_VERSION)); } static boolean checkErrorVersion(ClusterState currentState, ErrorState errorState) { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 2ee9aa0d86a0e..535758ed71eac 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -169,12 +169,11 @@ static boolean checkMetadataVersion( return false; } - // Version -1 is special, it means "empty" - if (reservedStateVersion.version() == -1L) { + if (reservedStateVersion.version().equals(ReservedStateMetadata.EMPTY_VERSION)) { return true; } - // Version 0 is special, snapshot restores will reset to 0. 
+ // require a regular positive version, reject any special version if (reservedStateVersion.version() <= 0L) { logger.warn( () -> format( diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index d075983464f76..70801cdef560b 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -77,6 +78,13 @@ public final long getUsageCount() { @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + // check if the query has any parameters that are not in the supported set (if declared) + Set supported = supportedQueryParameters(); + if (supported != null && supported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), supported); + throw new IllegalArgumentException(unrecognized(request, unsupported, supported, "parameter")); + } + // prepare the request for execution; has the side effect of touching the request parameters try (var action = prepareRequest(request, client)) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 8ce9b08eba205..16813f1141e12 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -365,6 +365,32 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } } + public 
boolean checkSupported( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + Iterator allHandlers = getAllHandlers(null, path); + while (allHandlers.hasNext()) { + RestHandler handler; + MethodHandlers handlers = allHandlers.next(); + if (handlers == null) { + handler = null; + } else { + handler = handlers.getHandler(method, restApiVersion); + } + + if (handler != null) { + var supportedParams = handler.supportedQueryParameters(); + return (supportedParams == null || supportedParams.containsAll(parameters)) + && handler.supportedCapabilities().containsAll(capabilities); + } + } + return false; + } + @Override public Map getStats() { final Iterator methodHandlersIterator = handlers.allNodeValues(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index c66fd72279670..4ab89618643f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Set; /** * Handler for REST requests @@ -85,6 +86,22 @@ default List routes() { return Collections.emptyList(); } + /** + * The set of query parameters accepted by this rest handler, + * {@code null} if query parameters should not be checked nor validated. + * TODO - make this not nullable when all handlers have been updated + */ + default @Nullable Set supportedQueryParameters() { + return null; + } + + /** + * The set of capabilities this rest handler supports. + */ + default Set supportedCapabilities() { + return Set.of(); + } + /** * Controls whether requests handled by this class are allowed to to access system indices by default. * @return {@code true} if requests handled by this class should be allowed to access system indices. 
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..9b89a6a932dd3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; + +import java.io.IOException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + +@ServerlessScope(Scope.INTERNAL) +public class RestNodesCapabilitiesAction extends BaseRestHandler { + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); + } + + @Override + public Set supportedQueryParameters() { + return Set.of("timeout", "method", "path", "parameters", "capabilities"); + } + + @Override + public String getName() { + return "nodes_capabilities_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + 
NodesCapabilitiesRequest r = new NodesCapabilitiesRequest().timeout(request.paramAsTime("timeout", null)) + .method(RestRequest.Method.valueOf(request.param("method", "GET"))) + .path(URLDecoder.decode(request.param("path"), StandardCharsets.UTF_8)) + .parameters(request.paramAsStringArray("parameters", Strings.EMPTY_ARRAY)) + .capabilities(request.paramAsStringArray("capabilities", Strings.EMPTY_ARRAY)) + .restApiVersion(request.getRestApiVersion()); + + return channel -> client.admin().cluster().nodesCapabilities(r, new NodesResponseRestListener<>(channel)); + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e502904004fef..e90f2ab8f50d2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1039,6 +1039,7 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 4885bbc277cb4..8e62a9306a3d4 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -119,7 +119,6 @@ private void assertExpectedLogMessages(Consumer consumer, MockLogAppende Logger testLogger = LogManager.getLogger("org.elasticsearch.test"); MockLogAppender appender = new MockLogAppender(); try (var ignored = 
appender.capturing("org.elasticsearch.test")) { - appender.start(); Arrays.stream(expectations).forEach(appender::addExpectation); consumer.accept(testLogger); appender.assertAllExpectationsMatched(); diff --git a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java index dfd4ad1fc0a45..176cb50f78e0f 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java @@ -349,8 +349,8 @@ protected int getDocCountOfCommit(IndexCommit indexCommit) { } @Override - synchronized boolean releaseCommit(IndexCommit indexCommit) { - return super.releaseCommit(wrapCommit(indexCommit)); + synchronized boolean releaseCommit(IndexCommit acquiredCommit) { + return super.releaseCommit(wrapCommit(acquiredCommit)); } }; diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 463f268657187..72abe322c702b 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -4000,6 +4000,7 @@ static boolean hasCircularReference(Exception cause) { return false; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108321") public void testDisabledFsync() throws IOException { var config = new TranslogConfig( shardId, diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 1ad790ae31804..88661abf5f1fe 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -49,6 +49,7 @@ import java.util.Set; 
import static org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata.ErrorKind.TRANSIENT; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; public class ReadinessServiceTests extends ESTestCase implements ReadinessClientProbe { private ClusterService clusterService; @@ -59,7 +60,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private static Metadata emptyReservedStateMetadata; static { - var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(-1L); + var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(EMPTY_VERSION); emptyReservedStateMetadata = new Metadata.Builder().put(fileSettingsState.build()).build(); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 53ca55f8a5f81..aca5d2cbee2c9 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.reservedstate.service; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -234,54 +233,11 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { return new 
ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); }).when(spiedController).parse(any(String.class), any()); - service.start(); - service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - assertTrue(service.watching()); - - Files.createDirectories(service.watchedFileDir()); - - // Make some fake settings file to cause the file settings service to process it - writeTestFile(service.watchedFile(), "{}"); - - // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file, - // on Linux is instantaneous. Windows is instantaneous too. - assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - - // Stopping the service should interrupt the watcher thread, we should be able to stop - service.stop(); - assertFalse(service.watching()); - service.close(); - // let the deadlocked thread end, so we can cleanly exit the test - deadThreadLatch.countDown(); - } - - public void testStopWorksIfProcessingDidntReturnYet() throws Exception { - var spiedController = spy(controller); - var service = new FileSettingsService(clusterService, spiedController, env); - - CountDownLatch processFileLatch = new CountDownLatch(1); - CountDownLatch deadThreadLatch = new CountDownLatch(1); - - doAnswer((Answer) invocation -> { - // allow the other thread to continue, but hold on a bit to avoid - // completing the task immediately in the main watcher loop - try { - Thread.sleep(1_000); - } catch (InterruptedException e) { - // pass it on - Thread.currentThread().interrupt(); - } - processFileLatch.countDown(); - new Thread(() -> { - // Simulate a thread that never allows the completion to complete - try { - deadThreadLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); - 
}).when(spiedController).parse(any(String.class), any()); + doAnswer((Answer) invocation -> { + var completionListener = invocation.getArgument(1, ActionListener.class); + completionListener.onResponse(null); + return null; + }).when(spiedController).initEmpty(any(String.class), any()); service.start(); service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); @@ -296,7 +252,7 @@ public void testStopWorksIfProcessingDidntReturnYet() throws Exception { // on Linux is instantaneous. Windows is instantaneous too. assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - // Stopping the service should interrupt the watcher thread, allowing the whole thing to exit + // Stopping the service should interrupt the watcher thread, we should be able to stop service.stop(); assertFalse(service.watching()); service.close(); diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 38f8ad4766b7e..5c034a81fc9cd 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,7 +269,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108104") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 02520c4ac723a..94a61e57be5b1 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -1444,10 +1444,10 @@ public static void waitForOpsToComplete(InternalEngine engine, long seqNo) throw assertBusy(() -> assertThat(engine.getLocalCheckpointTracker().getProcessedCheckpoint(), greaterThanOrEqualTo(seqNo))); } - public static boolean hasSnapshottedCommits(Engine engine) { + public static boolean hasAcquiredIndexCommits(Engine engine) { assert engine instanceof InternalEngine : "only InternalEngines have snapshotted commits, got: " + engine.getClass(); InternalEngine internalEngine = (InternalEngine) engine; - return internalEngine.hasSnapshottedCommits(); + return internalEngine.hasAcquiredIndexCommits(); } public static final class PrimaryTermSupplier implements LongSupplier { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java index 8d4085623d156..eccbf602f2c71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java @@ -234,11 +234,24 @@ protected final void assertSerialization(T testInstance, TransportVersion versio * how equality is checked. */ protected void assertEqualInstances(T expectedInstance, T newInstance) { - assertNotSame(newInstance, expectedInstance); + if (shouldBeSame(newInstance)) { + assertSame(newInstance, expectedInstance); + } else { + assertNotSame(newInstance, expectedInstance); + } assertThat(newInstance, equalTo(expectedInstance)); assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode())); } + /** + * Should this copy be the same instance as what we're copying? Defaults to + * {@code false} but implementers might override if the serialization returns + * a reuse constant. 
+ */ + protected boolean shouldBeSame(T newInstance) { + return false; + } + protected final T copyInstance(T instance) throws IOException { return copyInstance(instance, TransportVersion.current()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index bea222a9d8341..83f7fdfe386c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; @@ -259,6 +260,7 @@ public static void resetPortCounter() { // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); + MockLogAppender.init(); final List testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { @@ -1058,6 +1060,11 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static SecureString randomSecureStringOfLength(int codeUnits) { + var randomAlpha = randomAlphaOfLength(codeUnits); + return new SecureString(randomAlpha.toCharArray()); + } + public static String randomNullOrAlphaOfLength(int codeUnits) { return randomBoolean() ? 
null : randomAlphaOfLength(codeUnits); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 66e2664c7b8b9..03af54de96482 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1294,7 +1294,7 @@ public void beforeIndexDeletion() throws Exception { assertNoPendingIndexOperations(); assertAllPendingWriteLimitsReleased(); assertOpenTranslogReferences(); - assertNoSnapshottedIndexCommit(); + assertNoAcquiredIndexCommit(); } private void assertAllPendingWriteLimitsReleased() throws Exception { @@ -1357,7 +1357,7 @@ private void assertOpenTranslogReferences() throws Exception { }, 60, TimeUnit.SECONDS); } - private void assertNoSnapshottedIndexCommit() throws Exception { + private void assertNoAcquiredIndexCommit() throws Exception { assertBusy(() -> { for (NodeAndClient nodeAndClient : nodes.values()) { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); @@ -1368,7 +1368,7 @@ private void assertNoSnapshottedIndexCommit() throws Exception { if (engine instanceof InternalEngine) { assertFalse( indexShard.routingEntry().toString() + " has unreleased snapshotted index commits", - EngineTestCase.hasSnapshottedCommits(engine) + EngineTestCase.hasAcquiredIndexCommits(engine) ); } } catch (AlreadyClosedException ignored) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 10a3a8a78e483..bc3723119afa9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Property; @@ -19,9 +18,10 @@ import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; @@ -31,12 +31,38 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender extends AbstractAppender { +public class MockLogAppender { + private static final Map> mockAppenders = new ConcurrentHashMap<>(); + private static final RealMockAppender parent = new RealMockAppender(); private final List expectations; + private volatile boolean isAlive = true; + + private static class RealMockAppender extends AbstractAppender { + + RealMockAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent event) { + List appenders = mockAppenders.get(event.getLoggerName()); + if (appenders == null) { + // check if there is a root appender + appenders = mockAppenders.getOrDefault("", List.of()); + } + for (MockLogAppender appender : appenders) { + if (appender.isAlive == false) { + continue; + } + for (LoggingExpectation expectation : appender.expectations) { + expectation.match(event); + } + } + } + } public MockLogAppender() { - super("mock", null, null, false, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. 
When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -45,15 +71,16 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } - public void addExpectation(LoggingExpectation expectation) { - expectations.add(new WrappedLoggingExpectation(expectation)); + /** + * Initialize the mock log appender with the log4j system. + */ + public static void init() { + parent.start(); + Loggers.addAppender(LogManager.getLogger(""), parent); } - @Override - public void append(LogEvent event) { - for (LoggingExpectation expectation : expectations) { - expectation.match(event); - } + public void addExpectation(LoggingExpectation expectation) { + expectations.add(new WrappedLoggingExpectation(expectation)); } public void assertAllExpectationsMatched() { @@ -213,7 +240,7 @@ public void assertMatched() { */ private static class WrappedLoggingExpectation implements LoggingExpectation { - private final AtomicBoolean assertMatchedCalled = new AtomicBoolean(false); + private volatile boolean assertMatchedCalled = false; private final LoggingExpectation delegate; private WrappedLoggingExpectation(LoggingExpectation delegate) { @@ -230,7 +257,7 @@ public void assertMatched() { try { delegate.assertMatched(); } finally { - assertMatchedCalled.set(true); + assertMatchedCalled = true; } } @@ -243,34 +270,43 @@ public String toString() { /** * Adds the list of class loggers to this {@link MockLogAppender}. * - * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ public Releasable capturing(Class... 
classes) { - return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ public Releasable capturing(String... names) { - return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.asList(names)); } - private Releasable appendToLoggers(List loggers) { - start(); - for (final var logger : loggers) { - Loggers.addAppender(logger, this); + private Releasable appendToLoggers(List loggers) { + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + if (v == null) { + v = new CopyOnWriteArrayList<>(); + } + v.add(this); + return v; + }); } return () -> { - for (final var logger : loggers) { - Loggers.removeAppender(logger, this); + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? 
null : v; + }); } - stop(); // check that all expectations have been evaluated before this is released for (WrappedLoggingExpectation expectation : expectations) { assertThat( "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled.get(), + expectation.assertMatchedCalled, is(true) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 8997844bdbba1..fd3ba7d864f99 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -350,7 +350,13 @@ public void initClient() throws IOException { assert nodesVersions != null; } - protected List createAdditionalFeatureSpecifications() { + /** + * Override to provide additional test-only historical features. + * + * Note: This extension point cannot be used to add cluster features. The provided {@link FeatureSpecification}s + * must contain only historical features, otherwise an assertion error is thrown. 
+ */ + protected List additionalTestOnlyHistoricalFeatures() { return List.of(); } @@ -368,7 +374,7 @@ protected final TestFeatureService createTestFeatureService( ); } return new ESRestTestFeatureService( - createAdditionalFeatureSpecifications(), + additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values()) ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index c9c39b206ada8..78a4126ec09db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -58,10 +58,15 @@ class ESRestTestFeatureService implements TestFeatureService { if (MetadataHolder.HISTORICAL_FEATURES != null) { specs.add(MetadataHolder.HISTORICAL_FEATURES); } - var historicalFeatures = FeatureData.createFromSpecifications(specs).getHistoricalFeatures(); - this.knownHistoricalFeatureNames = historicalFeatures.lastEntry().getValue(); + FeatureData featureData = FeatureData.createFromSpecifications(specs); + assert featureData.getNodeFeatures().isEmpty() + : Strings.format( + "Only historical features can be injected via ESRestTestCase#additionalTestOnlyHistoricalFeatures(), rejecting %s", + featureData.getNodeFeatures().keySet() + ); + this.knownHistoricalFeatureNames = featureData.getHistoricalFeatures().lastEntry().getValue(); this.version = nodeVersions.stream().min(Comparator.naturalOrder()).orElse(Version.CURRENT); - this.allSupportedFeatures = Sets.union(clusterStateFeatures, historicalFeatures.floorEntry(version).getValue()); + this.allSupportedFeatures = Sets.union(clusterStateFeatures, featureData.getHistoricalFeatures().floorEntry(version).getValue()); } public static boolean hasFeatureMetadata() { @@ -81,15 +86,16 @@ public boolean 
clusterHasFeature(String featureId) { Matcher matcher = VERSION_FEATURE_PATTERN.matcher(featureId); if (matcher.matches()) { Version extractedVersion = Version.fromString(matcher.group(1)); - if (Version.V_8_14_0.before(extractedVersion)) { + if (Version.V_8_15_0.before(extractedVersion)) { // As of version 8.14.0 REST tests have been migrated to use features only. - // For migration purposes we provide a synthetic version feature gte_vX.Y.Z for any version at or before 8.14.0. + // For migration purposes we provide a synthetic version feature gte_vX.Y.Z for any version at or before 8.15.0 + // allowing for some transition period. throw new IllegalArgumentException( Strings.format( "Synthetic version features are only available before [%s] for migration purposes! " - + "Please add a cluster feature to an appropriate FeatureSpecification; features only necessary for " - + "testing can be supplied via ESRestTestCase#createAdditionalFeatureSpecifications()", - Version.V_8_14_0 + + "Please add a cluster feature to an appropriate FeatureSpecification; test-only historical-features " + + "can be supplied via ESRestTestCase#additionalTestOnlyHistoricalFeatures()", + Version.V_8_15_0 ) ); } @@ -99,10 +105,9 @@ public boolean clusterHasFeature(String featureId) { if (hasFeatureMetadata()) { throw new IllegalArgumentException( Strings.format( - "Unknown feature %s: check the feature has been added to the correct FeatureSpecification in the relevant module or, " - + "if this is a legacy feature used only in tests, to a test-only FeatureSpecification such as %s.", - featureId, - RestTestLegacyFeatures.class.getCanonicalName() + "Unknown feature %s: check the respective FeatureSpecification is provided both in module-info.java " + + "as well as in META-INF/services and verify the module is loaded during tests.", + featureId ) ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java 
b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java new file mode 100644 index 0000000000000..4973bb83311bc --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockLogAppenderTests extends ESTestCase { + + public void testConcurrentLogAndLifecycle() throws Exception { + Logger logger = LogManager.getLogger(MockLogAppenderTests.class); + final var keepGoing = new AtomicBoolean(true); + final var logThread = new Thread(() -> { + while (keepGoing.get()) { + logger.info("test"); + } + }); + logThread.start(); + + final var appender = new MockLogAppender(); + for (int i = 0; i < 1000; i++) { + try (var ignored = appender.capturing(MockLogAppenderTests.class)) { + Thread.yield(); + } + } + + keepGoing.set(false); + logThread.join(); + } +} diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml index 3ca15224dafc4..75671948de11a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml @@ -8,3 +8,7 @@ template: sort: field: "@timestamp" order: desc + mapping: + ignore_malformed: true + total_fields: + ignore_dynamic_beyond_limit: true diff --git 
a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index e6c84b6ed06f9..819d5d7eafb8e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -6,3 +6,9 @@ _meta: template: settings: codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is incompatible with the + # aggregate_metric_double field type. + ignore_malformed: false diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 3d9c1490e5a86..6c5d991621315 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -20,5 +20,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.app@default-pipeline final_pipeline: apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 4adcf125b2df9..6373363774602 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -27,5 +27,5 @@ template: value: error settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.error@default-pipeline final_pipeline: apm@pipeline diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml index c2233469110f8..a8f3e8a4c99e3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml @@ -22,5 +22,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.app@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml index 3d6d05c58e780..1aa06a361b722 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml @@ -23,7 +23,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.internal@default-pipeline final_pipeline: metrics-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml index f234b60b1a6ec..729110457f53e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline 
final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml index aa4f212532e56..0e18d1cd179ef 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml index 9b1a26486f482..d349c62e2255c 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml index c37ec93651d9d..f71a4c70abde7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml @@ -25,5 +25,5 @@ 
ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml index 3a99bc8472c66..218fbb2eaac87 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml index d829967f7eddf..9421b8e2f1fce 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml index bc21b35d4777f..5e8b7e94673f4 100644 --- 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml index 87a1e254baea7..c51bd79c6513d 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml index b45ce0ec0fad7..22e56fd7cabca 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml index 51d3c90cb4af8..6b4102bb673b8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml index 8825a93db28dc..7b10125fbce99 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml index e6657fbfe5d28..62359a8729f08 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: 
metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml index 174aec8c5515a..4f4d9a6a7e7d8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml @@ -23,7 +23,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm.rum@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml index de9c47dfd3f1b..e5c2ef8d57471 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml @@ -22,7 +22,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml deleted file mode 100644 index 65d8840e8f713..0000000000000 --- a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml +++ /dev/null @@ -1,56 +0,0 @@ ---- -version: ${xpack.apmdata.template.version} -_meta: - managed: true -description: | - Built-in default ingest pipeline for all APM data streams. 
- - This pipeline exists purely for routing, which cannot be - performed in a final pipeline, and for invoking user-defined - custom pipelines. All built-in processing occurs in the final - pipelines. -processors: - # Older versions of apm-server write various metrics to the - # metrics-apm.internal data stream, which newer versions break - # into separate datasets. We reroute these metrics coming from - # older versions of apm-server based on 'metricset.name'. -- set: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination') - field: metricset.interval - value: 1m - override: false -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction') - dataset: apm.transaction.1m -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'service_destination') - dataset: apm.service_destination.1m - -# Invoke user-defined custom pipelines, in ascending order of specificity: -- pipeline: - name: global@custom - ignore_missing_pipeline: true -- pipeline: - name: "{{{data_stream.type}}}@custom" - ignore_missing_pipeline: true -- pipeline: - if: "ctx?.data_stream?.dataset != 'apm'" - name: "{{{data_stream.type}}}-apm@custom" - ignore_missing_pipeline: true -- pipeline: - # (logs|metrics)-apm.app.-* should invoke (logs|metrics)-apm.app@custom, - # i.e. excluding service.name from the dataset. - if: "ctx.data_stream?.dataset != null && ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-apm.app@custom" - ignore_missing_pipeline: true -- pipeline: - # other data streams should include the whole dataset. 
- if: "ctx.data_stream?.dataset != null && !ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-{{{data_stream.dataset}}}@custom" - ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..a1f9565676fd4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml new file mode 100644 index 0000000000000..c46a1c1b44f96 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.error-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.error@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..bc07840727cca --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml new file mode 100644 index 0000000000000..247ee4cae67f0 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml @@ -0,0 +1,38 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.internal-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: + # Older versions of apm-server write various metrics to the + # metrics-apm.internal data stream, which newer versions break + # into separate datasets. We reroute these metrics coming from + # older versions of apm-server based on 'metricset.name'. +- set: + if: "ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination'" + field: metricset.interval + value: 1m + override: false +- reroute: + if: "ctx.metricset?.name == 'transaction'" + dataset: apm.transaction.1m +- reroute: + if: "ctx.metricset?.name == 'service_destination'" + dataset: apm.service_destination.1m + +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.internal@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml new file mode 100644 index 0000000000000..d8912fc2dd220 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_destination.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_destination@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml new file mode 100644 index 0000000000000..4cf5652e46bf4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_summary.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_summary@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..44ab85998cee7 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: "global@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics-apm.service_transaction@custom" + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..12e58e6747b5a --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.transaction@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml new file mode 100644 index 0000000000000..b1ce73308c5bc --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm.rum-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm.rum@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml new file mode 100644 index 0000000000000..039b6dccf7d57 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 71b54ae6297db..772057d4931a3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. 
This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 1 +version: 4 component-templates: # Data lifecycle. @@ -49,7 +49,27 @@ index-templates: # Ingest pipeline configuration requires to manually specify pipeline dependencies ingest-pipelines: - - apm@default-pipeline: {} + # Default pipelines. + # + # Each data stream index template gets its own default pipeline, + # with the exception of the interval data streams which share one + # for all intervals of the same metric, and the sampled traces + # data stream which does not have (or need) one. + - logs-apm.app@default-pipeline: {} + - logs-apm.error@default-pipeline: {} + - metrics-apm.app@default-pipeline: {} + - metrics-apm.internal@default-pipeline: + dependencies: + - metrics-apm.service_destination@default-pipeline + - metrics-apm.transaction@default-pipeline + - metrics-apm.service_destination@default-pipeline: {} + - metrics-apm.service_summary@default-pipeline: {} + - metrics-apm.service_transaction@default-pipeline: {} + - metrics-apm.transaction@default-pipeline: {} + - traces-apm@default-pipeline: {} + - traces-apm.rum@default-pipeline: {} + + # Final pipelines. 
- apm@pipeline: {} - traces-apm@pipeline: dependencies: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 4f6a5b58ff38d..8228d7011c9c1 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -322,6 +322,36 @@ public void testIndexTemplateConventions() throws Exception { .filter(t -> t.endsWith("@custom")) .toList(); assertThat(requiredCustomComponentTemplates, empty()); + + final Settings settings = template.template().settings(); + if (namePrefix.equals("traces-apm.sampled")) { + // traces-apm.sampled does not have any ingest pipelines. + assertThat(settings, equalTo(null)); + } else { + final boolean isIntervalDataStream = dataStreamType.equals("metrics") && namePrefix.matches(".*\\.[0-9]+m"); + final String defaultPipeline = settings.get("index.default_pipeline"); + if (isIntervalDataStream) { + // e.g. metrics-apm.service_transaction.10m should call + // metrics-apm.service_transaction@default-pipeline + final String withoutInterval = namePrefix.substring(0, namePrefix.lastIndexOf('.')); + assertThat(defaultPipeline, equalTo(withoutInterval + "@default-pipeline")); + } else { + // All other data streams should call a default pipeline + // specific to the data stream. 
+ assertThat(defaultPipeline, equalTo(namePrefix + "@default-pipeline")); + // no break here: the final pipeline assertions below must run for every template + } + + final String finalPipeline = settings.get("index.final_pipeline"); + switch (dataStreamType) { + case "metrics", "traces": + assertThat(finalPipeline, equalTo(dataStreamType + "-apm@pipeline")); + break; + default: + assertThat(finalPipeline, equalTo("apm@pipeline")); + break; + } + } } } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml index f4397ca18c101..4b45fda66835c 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml @@ -56,8 +56,7 @@ setup: --- "Test traces-apm-* data stream indexing": - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102360" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/102360" - do: index: diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml index 8a039e7b4eb1d..339b3b56462ac 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml @@ -22,6 +22,51 @@ setup: ] } + + - do: + ingest.put_pipeline: + id: "logs@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.app@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.error@custom" + body: > + { + 
"processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "metrics@custom" @@ -39,7 +84,7 @@ setup: - do: ingest.put_pipeline: - id: "metrics-apm@custom" + id: "metrics-apm.internal@custom" body: > { "processors": [ @@ -67,6 +112,66 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "metrics-apm.service_destination@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.service_summary@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.service_transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "traces@custom" @@ -97,42 +202,114 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "traces-apm.rum@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + --- -"Test metrics @custom ingest pipelines": +"Test logs @custom ingest pipelines": - do: bulk: - index: metrics-apm.app.svc1-testing refresh: true body: - - create: {} - - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", 
"dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.error-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", "dataset": "apm.error", "namespace": "testing"}}' - is_false: errors + - do: { search: { index: logs-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.app@custom" + + - do: { search: { index: logs-apm.error-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.error@custom" + +--- +"Test metrics @custom ingest pipelines": - do: - search: - index: metrics-apm.app.svc1-testing + bulk: + refresh: true body: - fields: ["custom_pipelines"] + - create: {"_index": "metrics-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.internal-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.internal", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_destination.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_destination.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_summary.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_summary.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_transaction.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.transaction.1m", "namespace": "testing"}}' + + - is_false: errors + + - do: { search: { index: 
metrics-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.app@custom" + + - do: { search: { index: metrics-apm.internal-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.internal@custom" + + - do: { search: { index: metrics-apm.service_destination.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_destination@custom" + + - do: { search: { index: metrics-apm.service_summary.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_summary@custom" + + - do: { search: { index: metrics-apm.service_transaction.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_transaction@custom" + + - do: { search: { index: metrics-apm.transaction.1m-testing } } - length: { hits.hits: 1 } - match: - hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm@custom,metrics-apm.app@custom" + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.transaction@custom" --- "Test traces @custom ingest pipelines": - do: bulk: - index: traces-apm-testing refresh: true body: - - create: {} + - create: {"_index": "traces-apm-testing"} - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm", "namespace": "testing"}}' + - create: {"_index": "traces-apm.rum-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm.rum", "namespace": "testing"}}' - is_false: errors - - do: - search: - index: traces-apm-testing + - do: { search: { index: traces-apm-testing } } - length: { hits.hits: 1 } - match: hits.hits.0._source.custom_pipelines: 
",global@custom,traces@custom,traces-apm@custom" + + - do: { search: { index: traces-apm.rum-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,traces@custom,traces-apm.rum@custom" diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml new file mode 100644 index 0000000000000..97265a9b81a75 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml @@ -0,0 +1,100 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "logs-apm.app@custom" + body: + template: + settings: + mapping: + total_fields: + limit: 20 + +--- +"Test ignore_malformed": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # Passing a (non-coercable) string into a numeric field should not + # cause an indexing failure; it should just not be indexed. 
+ - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": "string"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": 123}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["numeric_labels.*", "_ignored"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"_ignored": ["numeric_labels.key"]} } + - match: { hits.hits.1.fields: {"numeric_labels.key": [123.0]} } + +--- +"Test ignore_dynamic_beyond_limit": + - do: + bulk: + index: logs-apm.app.svc1-testing + refresh: true + body: + - create: {} + - {"@timestamp": "2017-06-22", "k1": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k2": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k3": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k4": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k5": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k6": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k7": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k8": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k9": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k10": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k11": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k12": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k13": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k14": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k15": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k16": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k17": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k18": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k19": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k20": ""} + + - is_false: errors + + - do: + search: + index: logs-apm.app.svc1-testing + body: + query: + term: + _ignored: + value: k20 + - length: { hits.hits: 1 } diff --git 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java index e8badfbee1e3e..ed5670a4bcc3b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java @@ -214,9 +214,9 @@ public void testGetSessionDoesNotLeakFileIfClosed() throws IOException { sessionReader.readFileBytes(files.get(1).name(), new BytesArray(new byte[10])); } - assertTrue(EngineTestCase.hasSnapshottedCommits(IndexShardTestCase.getEngine(indexShard))); + assertTrue(EngineTestCase.hasAcquiredIndexCommits(IndexShardTestCase.getEngine(indexShard))); restoreSourceService.closeSession(sessionUUID); - assertFalse(EngineTestCase.hasSnapshottedCommits(IndexShardTestCase.getEngine(indexShard))); + assertFalse(EngineTestCase.hasAcquiredIndexCommits(IndexShardTestCase.getEngine(indexShard))); closeShards(indexShard); // Exception will be thrown if file is not closed. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java index 08a2d5ae4f5b4..ee721d9d55714 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java @@ -90,7 +90,7 @@ protected NodesResponse newResponse(NodesRequest request, List remoteIndicesPrivileges = new ArrayList<>(); private RemoteClusterPermissions remoteClusterPermissions = RemoteClusterPermissions.NONE; private boolean restrictRequest = false; + private String description; public PutRoleRequest() {} @@ -63,6 +64,10 @@ public void name(String name) { this.name = name; } + public void description(String description) { + this.description = description; + } + public void cluster(String... 
clusterPrivilegesArray) { this.clusterPrivileges = clusterPrivilegesArray; } @@ -164,6 +169,10 @@ public String name() { return name; } + public String description() { + return description; + } + public String[] cluster() { return clusterPrivileges; } @@ -213,7 +222,8 @@ public RoleDescriptor roleDescriptor() { Collections.emptyMap(), remoteIndicesPrivileges.toArray(new RoleDescriptor.RemoteIndicesPrivileges[0]), remoteClusterPermissions, - null + null, + description ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index daf485814c799..486a347775264 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -21,7 +21,7 @@ */ public class PutRoleRequestBuilder extends ActionRequestBuilder { - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowDescription(true).build(); public PutRoleRequestBuilder(ElasticsearchClient client) { super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); @@ -43,6 +43,7 @@ public PutRoleRequestBuilder source(String name, BytesReference source, XContent request.addApplicationPrivileges(descriptor.getApplicationPrivileges()); request.runAs(descriptor.getRunAs()); request.metadata(descriptor.getMetadata()); + request.description(descriptor.getDescription()); return this; } @@ -51,6 +52,11 @@ public PutRoleRequestBuilder name(String name) { return this; } + public PutRoleRequestBuilder description(String description) { + request.description(description); + return this; + } + public PutRoleRequestBuilder 
cluster(String... cluster) { request.cluster(cluster); return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java index 472faee97a707..ec8fcd1c421ef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver; import org.elasticsearch.xpack.core.security.support.MetadataUtils; +import org.elasticsearch.xpack.core.security.support.Validation; import java.util.Arrays; import java.util.Set; @@ -102,6 +103,12 @@ public static ActionRequestValidationException validate( } } } + if (roleDescriptor.hasDescription()) { + Validation.Error error = Validation.Roles.validateRoleDescription(roleDescriptor.getDescription()); + if (error != null) { + validationException = addValidationError(error.toString(), validationException); + } + } return validationException; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index f85ca260c3fff..039ed8aa5fb64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,4 +166,16 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping 
getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } + + public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { + var request = new PutRoleMappingRequest(); + request.setName(mapping.getName()); + request.setEnabled(mapping.isEnabled()); + request.setRoles(mapping.getRoles()); + request.setRoleTemplates(mapping.getRoleTemplates()); + request.setRules(mapping.getExpression()); + request.setMetadata(mapping.getMetadata()); + + return request; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index d46c21f080308..88a930063190b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,7 +9,8 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -34,8 +35,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { - 
ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java index f91df320bb92d..82bfc4b4a0dd4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java @@ -224,7 +224,10 @@ public static final class RoleDescriptorsBytes implements Writeable { public static final RoleDescriptorsBytes EMPTY = new RoleDescriptorsBytes(new BytesArray("{}")); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build(); private final BytesReference rawBytes; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index caa5567364cd3..1dc293f929121 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -49,6 +49,8 @@ import java.util.Map; import java.util.Objects; +import static 
org.elasticsearch.common.xcontent.XContentHelper.createParserNotCompressed; + /** * A holder for a Role that contains user-readable information about the Role * without containing the actual Role object. @@ -70,6 +72,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { private final Restriction restriction; private final Map metadata; private final Map transientMetadata; + private final String description; /** * Needed as a stop-gap measure because {@link FieldPermissionsCache} has state (settings) but we need to use one @@ -93,7 +96,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, String)} */ @Deprecated public RoleDescriptor( @@ -108,7 +111,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, String)} */ @Deprecated public RoleDescriptor( @@ -130,7 +133,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -155,7 +159,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -170,7 +175,8 @@ public RoleDescriptor( @Nullable Map transientMetadata, @Nullable RemoteIndicesPrivileges[] remoteIndicesPrivileges, @Nullable RemoteClusterPermissions remoteClusterPermissions, - 
@Nullable Restriction restriction + @Nullable Restriction restriction, + @Nullable String description ) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; @@ -187,6 +193,7 @@ public RoleDescriptor( ? remoteClusterPermissions : RemoteClusterPermissions.NONE; this.restriction = restriction != null ? restriction : Restriction.NONE; + this.description = description != null ? description : ""; } public RoleDescriptor(StreamInput in) throws IOException { @@ -218,12 +225,21 @@ public RoleDescriptor(StreamInput in) throws IOException { } else { this.remoteClusterPermissions = RemoteClusterPermissions.NONE; } + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + this.description = in.readOptionalString(); + } else { + this.description = ""; + } } public String getName() { return this.name; } + public String getDescription() { + return description; + } + public String[] getClusterPrivileges() { return this.clusterPrivileges; } @@ -272,6 +288,10 @@ public boolean hasRunAs() { return runAs.length != 0; } + public boolean hasDescription() { + return description.length() != 0; + } + public boolean hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() { return hasConfigurableClusterPrivileges() || hasApplicationPrivileges() @@ -338,6 +358,7 @@ public String toString() { sb.append(group.toString()).append(","); } sb.append("], restriction=").append(restriction); + sb.append(", description=").append(description); sb.append("]"); return sb.toString(); } @@ -358,7 +379,8 @@ public boolean equals(Object o) { if (Arrays.equals(runAs, that.runAs) == false) return false; if (Arrays.equals(remoteIndicesPrivileges, that.remoteIndicesPrivileges) == false) return false; if (remoteClusterPermissions.equals(that.remoteClusterPermissions) == false) return false; - return restriction.equals(that.restriction); + if (restriction.equals(that.restriction) == false) return false; + return 
Objects.equals(description, that.description); } @Override @@ -373,6 +395,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(remoteIndicesPrivileges); result = 31 * result + remoteClusterPermissions.hashCode(); result = 31 * result + restriction.hashCode(); + result = 31 * result + Objects.hashCode(description); return result; } @@ -431,6 +454,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea if (hasRestriction()) { builder.field(Fields.RESTRICTION.getPreferredName(), restriction); } + if (hasDescription()) { + builder.field(Fields.DESCRIPTION.getPreferredName(), description); + } return builder.endObject(); } @@ -456,17 +482,22 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + out.writeOptionalString(description); + } } public static Parser.Builder parserBuilder() { return new Parser.Builder(); } - public record Parser(boolean allow2xFormat, boolean allowRestriction) { + public record Parser(boolean allow2xFormat, boolean allowRestriction, boolean allowDescription) { public static final class Builder { + private boolean allow2xFormat = false; private boolean allowRestriction = false; + private boolean allowDescription = false; private Builder() {} @@ -480,8 +511,13 @@ public Builder allowRestriction(boolean allowRestriction) { return this; } + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + public Parser build() { - return new Parser(allow2xFormat, allowRestriction); + return new Parser(allow2xFormat, allowRestriction, allowDescription); } } @@ -565,6 +601,8 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti remoteClusterPermissions = parseRemoteCluster(name, parser); } else 
if (allowRestriction && Fields.RESTRICTION.match(currentFieldName, parser.getDeprecationHandler())) { restriction = Restriction.parse(name, parser); + } else if (allowDescription && Fields.DESCRIPTION.match(currentFieldName, parser.getDeprecationHandler())) { + description = parser.text(); } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { // don't need it } else { @@ -586,7 +624,8 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti null, remoteIndicesPrivileges, remoteClusterPermissions, - restriction + restriction, + description ); } @@ -686,7 +725,7 @@ public static PrivilegesToCheck parsePrivilegesToCheck( } private static XContentParser createParser(BytesReference source, XContentType xContentType) throws IOException { - return XContentHelper.createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); + return createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); } public static RoleDescriptor.IndicesPrivileges[] parseIndices(String roleName, XContentParser parser, boolean allow2xFormat) @@ -1821,5 +1860,6 @@ public interface Fields { ParseField TYPE = new ParseField("type"); ParseField RESTRICTION = new ParseField("restriction"); ParseField WORKFLOWS = new ParseField("workflows"); + ParseField DESCRIPTION = new ParseField("description"); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java index 446209b1d7ac3..38aa1bc106e99 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java @@ -26,7 +26,10 @@ public record RoleDescriptorsIntersection(Collection> 
roleDe public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build(); public RoleDescriptorsIntersection(RoleDescriptor roleDescriptor) { this(List.of(Set.of(roleDescriptor))); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 8e4f9108c3b9c..49be4c5d466b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -407,6 +407,7 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm.*"), getRemoteIndicesReadPrivileges("traces-apm-*") }, null, + null, null ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 0793578004a4e..dd8f34a60fa1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -102,6 +102,7 @@ public class ReservedRolesStore implements BiConsumer, ActionListene new String[] { "*" } ) ), + null, null ); @@ -201,6 +202,7 @@ private static Map initializeReservedRoles() { 
getRemoteIndicesReadPrivileges("/metrics-(beats|elasticsearch|enterprisesearch|kibana|logstash).*/"), getRemoteIndicesReadPrivileges("metricbeat-*") }, null, + null, null ) ), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java index 3c482b82075fc..eaf59e001d098 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.security.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; @@ -204,10 +205,19 @@ public static Error validatePassword(SecureString password) { public static final class Roles { + public static final int MAX_DESCRIPTION_LENGTH = 1000; + public static Error validateRoleName(String roleName, boolean allowReserved) { return validateRoleName(roleName, allowReserved, MAX_NAME_LENGTH); } + public static Error validateRoleDescription(String description) { + if (description != null && description.length() > MAX_DESCRIPTION_LENGTH) { + return new Error(Strings.format("Role description must be less than %s characters.", MAX_DESCRIPTION_LENGTH)); + } + return null; + } + static Error validateRoleName(String roleName, boolean allowReserved, int maxLength) { if (roleName == null) { return new Error("role name is missing"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java index 1413d7f87eaa1..a1b141d0aa0e8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java @@ -46,6 +46,7 @@ public class SystemUser extends InternalUser { null, null, null, + null, null ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java index 710c4c5adaf67..1bad9bdfbfc77 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java @@ -30,8 +30,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java index 525c805f37929..78cf2020f26cc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java @@ -71,7 +71,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 17298c04709a4..bb7778b821457 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -106,7 +106,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java index 161e9419f9561..03706d928caad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java @@ -63,7 +63,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(workflows.toArray(String[]::new)) + new RoleDescriptor.Restriction(workflows.toArray(String[]::new)), + null ) ), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java index b7495004e58e7..483b2426e6ad2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java @@ -314,6 +314,7 @@ public static CrossClusterAccessSubjectInfo randomCrossClusterAccessSubjectInfo( null, null, null, + null, null ) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java index f22bf886357c4..ec20e6e5fa2ff 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java @@ -31,7 +31,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java new file mode 100644 index 0000000000000..e6b9097a023cc --- /dev/null +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java @@ -0,0 +1,314 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.authz; + +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.MetadataUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.test.ESTestCase.randomNonEmptySubsetOf; +import static org.elasticsearch.test.ESTestCase.randomSubsetOf; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThanMany; +import static 
org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; + +public final class RoleDescriptorTestHelper { + + public static Builder builder() { + return new Builder(); + } + + public static RoleDescriptor randomRoleDescriptor() { + return builder().allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(); + } + + public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { + final Map metadata = new HashMap<>(); + while (randomBoolean()) { + String key = randomAlphaOfLengthBetween(4, 12); + if (allowReservedMetadata && randomBoolean()) { + key = MetadataUtils.RESERVED_PREFIX + key; + } + final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); + metadata.put(key, value); + } + return metadata; + } + + public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 2 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 3 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 4 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + default -> throw new IllegalStateException("Unexpected value"); + }; + return configurableClusterPrivileges; + } + + public static RoleDescriptor.ApplicationResourcePrivileges[] randomApplicationPrivileges() { + final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = + new RoleDescriptor.ApplicationResourcePrivileges[randomIntBetween(0, 2)]; + for (int i = 0; i < applicationPrivileges.length; i++) { + final RoleDescriptor.ApplicationResourcePrivileges.Builder builder = 
RoleDescriptor.ApplicationResourcePrivileges.builder(); + builder.application("app" + randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); + if (randomBoolean()) { + builder.privileges("*"); + } else { + builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + if (randomBoolean()) { + builder.resources("*"); + } else { + builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + applicationPrivileges[i] = builder.build(); + } + return applicationPrivileges; + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { + return randomRemoteIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = + new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; + for (int i = 0; i < remoteIndexPrivileges.length; i++) { + remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( + innerIndexPrivileges[i], + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ); + } + return remoteIndexPrivileges; + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max) { + return randomIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; + for (int i = 0; i < indexPrivileges.length; i++) { + indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); + } + return indexPrivileges; + } + + public static 
RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { + return randomIndicesPrivilegesBuilder(Set.of()); + } + + private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { + final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); + assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + return builder; + } + + private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { + if (randomBoolean()) { + builder.query(randomBoolean() ? Strings.format(""" + { "term": { "%s" : "%s" } } + """, randomAlphaOfLengthBetween(3, 24), randomAlphaOfLengthBetween(3, 24)) : """ + { "match_all": {} } + """); + } + if (randomBoolean()) { + if (randomBoolean()) { + builder.grantedFields("*"); + builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } else { + builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } + } + } + + public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { + final int searchSize = randomIntBetween(0, 3); + final int replicationSize = randomIntBetween(searchSize == 0 ? 
1 : 0, 3); + assert searchSize + replicationSize > 0; + + final String[] clusterPrivileges; + if (searchSize > 0 && replicationSize > 0) { + clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; + } else if (searchSize > 0) { + clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; + } else { + clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; + } + + final List indexPrivileges = new ArrayList<>(); + for (int i = 0; i < searchSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCS_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + indexPrivileges.add(builder.build()); + } + for (int i = 0; i < replicationSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCR_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + indexPrivileges.add(builder.build()); + } + + return new RoleDescriptor( + ROLE_DESCRIPTOR_NAME, + clusterPrivileges, + indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), + null + ); + } + + public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { + return randomValueOtherThanMany( + roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), + () -> randomList(minSize, maxSize, () -> builder().build()) + ); + } + + public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { + final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); + final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); + for (int i = 0; i < maxGroups; i++) { + remoteClusterPermissions.addGroup( + 
new RemoteClusterPermissionGroup( + randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ) + ); + } + return remoteClusterPermissions; + } + + public static class Builder { + + private boolean allowReservedMetadata = false; + private boolean allowRemoteIndices = false; + private boolean alwaysIncludeRemoteIndices = false; + private boolean allowRestriction = false; + private boolean allowDescription = false; + private boolean allowRemoteClusters = false; + + public Builder() {} + + public Builder allowReservedMetadata(boolean allowReservedMetadata) { + this.allowReservedMetadata = allowReservedMetadata; + return this; + } + + public Builder alwaysIncludeRemoteIndices() { + this.alwaysIncludeRemoteIndices = true; + return this; + } + + public Builder allowRemoteIndices(boolean allowRemoteIndices) { + this.allowRemoteIndices = allowRemoteIndices; + return this; + } + + public Builder allowRestriction(boolean allowRestriction) { + this.allowRestriction = allowRestriction; + return this; + } + + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + + public Builder allowRemoteClusters(boolean allowRemoteClusters) { + this.allowRemoteClusters = allowRemoteClusters; + return this; + } + + public RoleDescriptor build() { + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; + if (alwaysIncludeRemoteIndices || (allowRemoteIndices && randomBoolean())) { + remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); + } else { + remoteIndexPrivileges = null; + } + + RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; + if (allowRemoteClusters && randomBoolean()) { + remoteClusters = randomRemoteClusterPermissions(randomIntBetween(1, 5)); + } + + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + 
randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + randomIndicesPrivileges(0, 3), + randomApplicationPrivileges(), + randomClusterPrivileges(), + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(allowReservedMetadata), + Map.of(), + remoteIndexPrivileges, + remoteClusters, + allowRestriction ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null, + allowDescription ? randomAlphaOfLengthBetween(0, 20) : null + ); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index a3a590dc5a4d4..d7b9f9ddd5b58 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -31,33 +31,24 @@ import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.hamcrest.Matchers; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Set; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -156,17 +147,18 @@ public void testToString() { + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" + ", runAs=[sudo], metadata=[{}], remoteIndicesPrivileges=[], remoteClusterPrivileges=[]" - + ", 
restriction=Restriction[workflows=[]]]" + + ", restriction=Restriction[workflows=[]], description=]" ) ); } public void testToXContentRoundtrip() throws Exception { - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, true, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference xContentValue = toShuffledXContent(descriptor, xContentType, ToXContent.EMPTY_PARAMS, false); final RoleDescriptor parsed = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse(descriptor.getName(), xContentValue, xContentType); assertThat(parsed, equalTo(descriptor)); @@ -268,9 +260,14 @@ public void testParse() throws Exception { ], "restriction":{ "workflows": ["search_application_query"] - } + }, + "description": "Lorem ipsum dolor sit amet, consectetur adipiscing elit." }"""; - rd = RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test", new BytesArray(q), XContentType.JSON); + rd = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build() + .parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(3, rd.getIndicesPrivileges().length); @@ -594,16 +591,18 @@ public void testSerializationForCurrentVersion() throws Exception { final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); final boolean canIncludeRemoteClusters = version.onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); final boolean canIncludeWorkflows = version.onOrAfter(WORKFLOWS_RESTRICTION_VERSION); + final boolean canIncludeDescription = version.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); logger.info("Testing serialization with version {}", version); BytesStreamOutput output = new BytesStreamOutput(); 
output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor( - true, - canIncludeRemoteIndices, - canIncludeWorkflows, - canIncludeRemoteClusters - ); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(canIncludeRemoteIndices) + .allowRestriction(canIncludeWorkflows) + .allowDescription(canIncludeDescription) + .allowRemoteClusters(canIncludeRemoteClusters) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -626,7 +625,14 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, false, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); + descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -650,7 +656,8 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept descriptor.getTransientMetadata(), null, null, - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -671,7 +678,13 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, false, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + 
.allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(true) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -693,9 +706,10 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept descriptor.getRunAs(), descriptor.getMetadata(), descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), null, - descriptor.getRemoteClusterPermissions(), - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -715,7 +729,13 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, true, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(true) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -739,7 +759,8 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th descriptor.getTransientMetadata(), descriptor.getRemoteIndicesPrivileges(), descriptor.getRemoteClusterPermissions(), - null + null, + descriptor.getDescription() ) ) ); @@ -793,6 +814,96 @@ public void testParseRoleWithRestrictionWhenAllowRestrictionIsTrue() throws IOEx assertThat(role.getRestriction().getWorkflows(), arrayContaining("search_application")); } + public void testSerializationWithDescriptionAndUnsupportedVersions() throws IOException 
{ + final TransportVersion versionBeforeRoleDescription = TransportVersionUtils.getPreviousVersion( + TransportVersions.SECURITY_ROLE_DESCRIPTION + ); + final TransportVersion version = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_7_17_0, + versionBeforeRoleDescription + ); + final BytesStreamOutput output = new BytesStreamOutput(); + output.setTransportVersion(version); + + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder().allowDescription(true).build(); + descriptor.writeTo(output); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); + StreamInput streamInput = new NamedWriteableAwareStreamInput( + ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes())), + registry + ); + streamInput.setTransportVersion(version); + final RoleDescriptor serialized = new RoleDescriptor(streamInput); + if (descriptor.hasDescription()) { + assertThat( + serialized, + equalTo( + new RoleDescriptor( + descriptor.getName(), + descriptor.getClusterPrivileges(), + descriptor.getIndicesPrivileges(), + descriptor.getApplicationPrivileges(), + descriptor.getConditionalClusterPrivileges(), + descriptor.getRunAs(), + descriptor.getMetadata(), + descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), + descriptor.getRemoteClusterPermissions(), + descriptor.getRestriction(), + null + ) + ) + ); + } else { + assertThat(descriptor, equalTo(serialized)); + } + } + + public void testParseRoleWithDescriptionFailsWhenAllowDescriptionIsFalse() { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + final ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(false) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new 
BytesArray(json), XContentType.JSON) + ) + ); + assertThat( + e, + TestMatchers.throwableWithMessage( + containsString("failed to parse role [test_role_with_description]. unexpected field [description]") + ) + ); + } + + public void testParseRoleWithDescriptionWhenAllowDescriptionIsTrue() throws IOException { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + RoleDescriptor role = RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(true) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON) + ); + assertThat(role.getName(), equalTo("test_role_with_description")); + assertThat(role.getDescription(), equalTo("Lorem ipsum")); + assertThat(role.getClusterPrivileges(), arrayContaining("manage_security")); + } + public void testParseEmptyQuery() throws Exception { String json = """ { @@ -1148,6 +1259,7 @@ public void testIsEmpty() { new HashMap<>(), new RoleDescriptor.RemoteIndicesPrivileges[0], RemoteClusterPermissions.NONE, + null, null ).isEmpty() ); @@ -1189,7 +1301,8 @@ public void testIsEmpty() { : new RoleDescriptor.RemoteIndicesPrivileges[] { RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() }, booleans.get(7) ? null : randomRemoteClusterPermissions(5), - booleans.get(8) ? null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + booleans.get(8) ? 
null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); if (booleans.stream().anyMatch(e -> e.equals(false))) { @@ -1212,11 +1325,18 @@ public void testHasPrivilegesOtherThanIndex() { null, null, null, + null, null ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), is(false) ); - final RoleDescriptor roleDescriptor = randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(true) + .allowDescription(true) + .allowRemoteClusters(true) + .build(); final boolean expected = roleDescriptor.hasClusterPrivileges() || roleDescriptor.hasConfigurableClusterPrivileges() || roleDescriptor.hasApplicationPrivileges() @@ -1225,234 +1345,8 @@ public void testHasPrivilegesOtherThanIndex() { assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } - public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { - return randomValueOtherThanMany( - roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), - () -> randomList(minSize, maxSize, () -> randomRoleDescriptor(false)) - ); - } - - public static RoleDescriptor randomRoleDescriptor() { - return randomRoleDescriptor(true); - } - - public static RoleDescriptor randomRoleDescriptor(boolean allowReservedMetadata) { - return randomRoleDescriptor(allowReservedMetadata, false, false, false); - } - - public static RoleDescriptor randomRoleDescriptor( - boolean allowReservedMetadata, - boolean allowRemoteIndices, - boolean allowWorkflows, - boolean allowRemoteClusters - ) { - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; - if (false == allowRemoteIndices || randomBoolean()) { - remoteIndexPrivileges = null; - } else { - remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); - } - - 
RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; - if (allowRemoteClusters && randomBoolean()) { - randomRemoteClusterPermissions(randomIntBetween(1, 5)); - } - - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - randomIndicesPrivileges(0, 3), - randomApplicationPrivileges(), - randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - randomRoleDescriptorMetadata(allowReservedMetadata), - Map.of(), - remoteIndexPrivileges, - remoteClusters, - allowWorkflows ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null - ); - } - - public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { - final Map metadata = new HashMap<>(); - while (randomBoolean()) { - String key = randomAlphaOfLengthBetween(4, 12); - if (allowReservedMetadata && randomBoolean()) { - key = MetadataUtils.RESERVED_PREFIX + key; - } - final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); - metadata.put(key, value); - } - return metadata; - } - - public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { - final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { - case 0 -> new ConfigurableClusterPrivilege[0]; - case 1 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 2 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 3 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 4 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - default -> throw new IllegalStateException("Unexpected value"); - }; - return configurableClusterPrivileges; - } - - public static ApplicationResourcePrivileges[] randomApplicationPrivileges() { - final ApplicationResourcePrivileges[] applicationPrivileges = new ApplicationResourcePrivileges[randomIntBetween(0, 2)]; - for (int i = 0; i < applicationPrivileges.length; i++) { - final ApplicationResourcePrivileges.Builder builder = ApplicationResourcePrivileges.builder(); - builder.application("app" + 
randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); - if (randomBoolean()) { - builder.privileges("*"); - } else { - builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - if (randomBoolean()) { - builder.resources("*"); - } else { - builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - applicationPrivileges[i] = builder.build(); - } - return applicationPrivileges; - } - - public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { - final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); - final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); - for (int i = 0; i < maxGroups; i++) { - remoteClusterPermissions.addGroup( - new RemoteClusterPermissionGroup( - randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ) - ); - } - return remoteClusterPermissions; - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { - return randomRemoteIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = - new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; - for (int i = 0; i < remoteIndexPrivileges.length; i++) { - remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( - innerIndexPrivileges[i], - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ); - } - return remoteIndexPrivileges; - } - - public static RoleDescriptor.IndicesPrivileges[] 
randomIndicesPrivileges(int min, int max) { - return randomIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; - for (int i = 0; i < indexPrivileges.length; i++) { - indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); - } - return indexPrivileges; - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { - return randomIndicesPrivilegesBuilder(Set.of()); - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { - final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); - assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - return builder; - } - - private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { - if (randomBoolean()) { - builder.query( - randomBoolean() - ? 
"{ \"term\": { \"" + randomAlphaOfLengthBetween(3, 24) + "\" : \"" + randomAlphaOfLengthBetween(3, 24) + "\" }" - : "{ \"match_all\": {} }" - ); - } - if (randomBoolean()) { - if (randomBoolean()) { - builder.grantedFields("*"); - builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } else { - builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } - } - } - private static void resetFieldPermssionsCache() { RoleDescriptor.setFieldPermissionsCache(new FieldPermissionsCache(Settings.EMPTY)); } - public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { - final int searchSize = randomIntBetween(0, 3); - final int replicationSize = randomIntBetween(searchSize == 0 ? 1 : 0, 3); - assert searchSize + replicationSize > 0; - - final String[] clusterPrivileges; - if (searchSize > 0 && replicationSize > 0) { - clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; - } else if (searchSize > 0) { - clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; - } else { - clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; - } - - final List indexPrivileges = new ArrayList<>(); - for (int i = 0; i < searchSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCS_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - indexPrivileges.add(builder.build()); - } - for (int i = 0; i < replicationSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCR_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - indexPrivileges.add(builder.build()); - } - - return new RoleDescriptor( - ROLE_DESCRIPTOR_NAME, - clusterPrivileges, - 
indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), - null - ); - } - } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java index 6f8691fbb317a..a892e8b864e6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java @@ -27,7 +27,7 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.equalTo; public class RoleDescriptorsIntersectionTests extends ESTestCase { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java index 0c15256d1951e..5401be220fe8b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java @@ -276,7 +276,8 @@ public void testForWorkflowWithRestriction() { null, null, null, - new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }) + new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }), + null ), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, @@ -290,7 +291,7 @@ public void testForWorkflowWithRestriction() { public void 
testForWorkflowWithoutRestriction() { final SimpleRole role = Role.buildFromRoleDescriptor( - new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null), + new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null, null), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java index 554c82dfa44fb..74c8e6addf243 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -82,7 +82,7 @@ public void testCrossClusterAccessRoleReference() { } public void testFixedRoleReference() throws ExecutionException, InterruptedException { - final RoleDescriptor roleDescriptor = RoleDescriptorTests.randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final String source = "source"; final var fixedRoleReference = new RoleReference.FixedRoleReference(roleDescriptor, source); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index 3eae6c1fa4f5a..1951431859ffe 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -189,8 +189,7 @@ "type": "geo_point" }, "path_match": [ - "location", - "*.location" + "*.geo.location" ] } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json index 88c51a9aef284..85a744200162c 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json @@ -53,6 +53,9 @@ }, "completed_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json index 2b3ecbac92352..8702a098da826 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json @@ -56,6 +56,9 @@ "type": "binary" } } + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json index 6a89d7874c073..ad66ad8796862 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json @@ -316,6 +316,9 @@ }, "tags": { "type": "keyword" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json index 6be455e02825a..b2a116c0c592e 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json @@ -33,6 +33,9 @@ }, "updated_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json index 698e4359e73c1..20e9ccf8daff3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json @@ -38,6 +38,9 @@ "last": { "type": "boolean", "index": false + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json index 268e53a9470a8..9bf0c8b23f5ad 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "upload_start": { "type": "date" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json index 7247920e5e293..7c990600749d3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "data": { "type": "binary", "store": true diff --git 
a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json index bdf7e4d00d869..84a3fe05777a9 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "agent_id": { "type": "keyword" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json index 44e2e67dd06c3..79b4ed0109f32 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json @@ -13,6 +13,9 @@ "coordinator_idx": { "type": "integer" }, + "namespaces": { + "type": "keyword" + }, "data": { "enabled": false, "type": "object" diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json new file mode 100644 index 0000000000000..933d7681c92e8 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "number_of_shards": 1, + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "default kibana reporting settings installed by elasticsearch", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 9c4da646c3399..240ad36199fe3 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -5,14 +5,10 @@ "hidden": true }, "allow_auto_create": true, - "composed_of": ["kibana-reporting@custom"], + "composed_of": ["kibana-reporting@settings", "kibana-reporting@custom"], "ignore_missing_component_templates": ["kibana-reporting@custom"], "template": { "lifecycle": {}, - "settings": { - "number_of_shards": 1, - "auto_expand_replicas": "0-1" - }, "mappings": { "properties": { "meta": { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index c540a61c28f05..fbba399162ee0 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -10,11 +10,10 @@ "sort": { "field": [ "profiling.project.id", - "@timestamp", "orchestrator.resource.name", + "host.name", "container.name", - "process.thread.name", - "host.id" + "process.thread.name" ] } }, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 99240d6b6d49d..333ef30f078e6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -823,7 +823,7 @@ public void updateConnectorNative(UpdateConnectorNativeAction.Request request, A Connector.IS_NATIVE_FIELD.getPreferredName(), request.isNative(), 
Connector.STATUS_FIELD.getPreferredName(), - ConnectorStatus.CONFIGURED + ConnectorStatus.CONFIGURED.toString() ) ) @@ -969,7 +969,7 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request Connector.SERVICE_TYPE_FIELD.getPreferredName(), request.getServiceType(), Connector.STATUS_FIELD.getPreferredName(), - newStatus + newStatus.toString() ) ) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f5ab8309e27e7..4316b4bccd9bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -266,7 +266,7 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener contexts, @@ -54,11 +48,7 @@ public Factory( int taskConcurrency, int limit ) { - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); } @Override @@ -66,15 +56,6 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneCountOperator(driverContext.blockFactory(), sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneCountOperator[dataPartitioning = " + dataPartitioning + ", limit = " + limit + "]"; @@ -118,7 +99,7 @@ public void 
finish() { } @Override - public Page getOutput() { + protected Page getCheckedOutput() throws IOException { if (isFinished()) { assert remainingDocs <= 0 : remainingDocs; return null; @@ -170,8 +151,6 @@ public Page getOutput() { } } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 9a7abb2aafc58..fae0a86762b92 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.TimeValue; @@ -34,6 +35,7 @@ import java.io.UncheckedIOException; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -72,10 +74,46 @@ protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSlice this.sliceQueue = sliceQueue; } - public interface Factory extends SourceOperator.SourceOperatorFactory { - int taskConcurrency(); + public abstract static class Factory implements SourceOperator.SourceOperatorFactory { + protected final DataPartitioning dataPartitioning; + protected final int taskConcurrency; + protected final int limit; + protected final LuceneSliceQueue sliceQueue; + + protected Factory( + List contexts, + Function queryFunction, + DataPartitioning dataPartitioning, + 
int taskConcurrency, + int limit + ) { + this.limit = limit; + this.dataPartitioning = dataPartitioning; + var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); + this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + } + + public final int taskConcurrency() { + return taskConcurrency; + } + + public final int limit() { + return limit; + } } + @Override + public final Page getOutput() { + try { + return getCheckedOutput(); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected abstract Page getCheckedOutput() throws IOException; + @Override public void close() {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 9b942114e61f2..64836b00a7e1b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreMode; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; @@ -21,7 +20,6 @@ import org.elasticsearch.core.Releasables; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.List; import java.util.function.Function; @@ -37,12 +35,9 @@ public class LuceneSourceOperator extends LuceneOperator { private final LeafCollector leafCollector; private final int minPageSize; - public static class Factory implements LuceneOperator.Factory { - private final 
DataPartitioning dataPartitioning; - private final int taskConcurrency; + public static class Factory extends LuceneOperator.Factory { + private final int maxPageSize; - private final int limit; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ -52,12 +47,8 @@ public Factory( int maxPageSize, int limit ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -65,19 +56,10 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneSourceOperator[dataPartitioning = " @@ -123,7 +105,7 @@ public void finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { assert currentPagePos == 0 : currentPagePos; return null; @@ -162,8 +144,6 @@ public Page getOutput() { currentPagePos = 0; } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2c22d850daf0c..e9fb15d265fbe 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; @@ -28,7 +27,6 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.Arrays; import java.util.List; import java.util.Optional; @@ -39,13 +37,10 @@ * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { - public static final class Factory implements LuceneOperator.Factory { - private final int taskConcurrency; + public static final class Factory extends LuceneOperator.Factory { + ; private final int maxPageSize; private final List> sorts; - private final int limit; - private final DataPartitioning dataPartitioning; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ -56,13 +51,9 @@ public Factory( int limit, List> sorts ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; this.sorts = sorts; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -70,19 +61,10 @@ public SourceOperator get(DriverContext driverContext) { return new 
LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { String notPrettySorts = sorts.stream().map(Strings::toString).collect(Collectors.joining(",")); @@ -136,7 +118,7 @@ public void finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { return null; } @@ -152,7 +134,7 @@ public Page getOutput() { } } - private Page collect() { + private Page collect() throws IOException { assert doneCollecting == false; var scorer = getCurrentOrLoadNextScorer(); if (scorer == null) { @@ -169,8 +151,6 @@ private Page collect() { } catch (CollectionTerminatedException cte) { // Lucene terminated early the collection (doing topN for an index that's sorted and the topN uses the same sorting) scorer.markAsDone(); - } catch (IOException e) { - throw new UncheckedIOException(e); } if (scorer.isDone()) { var nextScorer = getCurrentOrLoadNextScorer(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index 58f2c8de67b61..899060dae5fbb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import 
org.apache.lucene.util.PriorityQueue; @@ -48,13 +47,23 @@ * This operator currently only supports shard level concurrency. A new concurrency mechanism should be introduced at the time serie level * in order to read tsdb indices in parallel. */ -public record TimeSeriesSortedSourceOperatorFactory( - int limit, - int maxPageSize, - int taskConcurrency, - TimeValue timeSeriesPeriod, - LuceneSliceQueue sliceQueue -) implements LuceneOperator.Factory { +public class TimeSeriesSortedSourceOperatorFactory extends LuceneOperator.Factory { + + private final int maxPageSize; + private final TimeValue timeSeriesPeriod; + + private TimeSeriesSortedSourceOperatorFactory( + List contexts, + Function queryFunction, + int taskConcurrency, + int maxPageSize, + TimeValue timeSeriesPeriod, + int limit + ) { + super(contexts, queryFunction, DataPartitioning.SHARD, taskConcurrency, limit); + this.maxPageSize = maxPageSize; + this.timeSeriesPeriod = timeSeriesPeriod; + } @Override public SourceOperator get(DriverContext driverContext) { @@ -62,11 +71,6 @@ public SourceOperator get(DriverContext driverContext) { return new Impl(driverContext.blockFactory(), sliceQueue, maxPageSize, limit, rounding); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - @Override public String describe() { return "TimeSeriesSortedSourceOperator[maxPageSize = " + maxPageSize + ", limit = " + limit + "]"; @@ -80,10 +84,14 @@ public static TimeSeriesSortedSourceOperatorFactory create( List searchContexts, Function queryFunction ) { - var weightFunction = LuceneOperator.weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - var sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, DataPartitioning.SHARD, taskConcurrency); - taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); - return new TimeSeriesSortedSourceOperatorFactory(limit, maxPageSize, taskConcurrency, timeSeriesPeriod, sliceQueue); + return new TimeSeriesSortedSourceOperatorFactory( 
+ searchContexts, + queryFunction, + taskConcurrency, + maxPageSize, + timeSeriesPeriod, + limit + ); } static final class Impl extends SourceOperator { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java new file mode 100644 index 0000000000000..bb8d3fd269a8a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.compute.data.ElementType; + +import java.util.ArrayList; +import java.util.List; + +/** + * This class provides operator factories for time-series aggregations. + * A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. + * For example: {@code sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket} + * + * 1. Initial Stage: + * In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. + * The {@code values} aggregations are added to collect values of the grouping keys excluding the time-bucket, + * which are then used for final result grouping. 
+ * {@code rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 2. Intermediate Stage: + * Equivalent to the final mode of a standard hash aggregation. + * This stage merges and reduces the result of the rate aggregations, + * but merges (without reducing) the results of non-rate aggregations. + * {@code rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 3. Final Stage: + * This extra stage performs outer aggregations over the rate results + * and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. + * {@code sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket} + */ +public final class TimeSeriesAggregationOperatorFactories { + + public record Initial( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + return new HashAggregationOperator( + aggregators, + () -> new TimeSeriesBlockHash(tsHashChannel, timeBucketChannel, driverContext), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesInitialAggregationOperatorFactory"; + } + } + + public record Intermediate( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List 
aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + List hashGroups = List.of( + new BlockHash.GroupSpec(tsHashChannel, ElementType.BYTES_REF), + new BlockHash.GroupSpec(timeBucketChannel, ElementType.LONG) + ); + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(hashGroups, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesIntermediateAggregationOperatorFactory"; + } + } + + public record Final( + List groupings, + List outerRates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : outerRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(groupings, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesFinalAggregationOperatorFactory"; + } + } + + static List valuesAggregatorForGroupings(List groupings, int timeBucketChannel) { + List aggregators = new ArrayList<>(); + for (BlockHash.GroupSpec g : groupings) { + if (g.channel() != timeBucketChannel) { + final List channels = List.of(g.channel()); + // TODO: perhaps introduce a specialized aggregator for this? 
+ var aggregatorSupplier = (switch (g.elementType()) { + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + }); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + } + return aggregators; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java deleted file mode 100644 index 0cf0854a9b0c7..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.GroupingAggregator; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; -import org.elasticsearch.core.TimeValue; - -import java.util.List; - -public record TimeSeriesAggregationOperatorFactory( - AggregatorMode mode, - int tsHashChannel, - int timestampIntervalChannel, - TimeValue timeSeriesPeriod, - List aggregators, - int maxPageSize -) implements Operator.OperatorFactory { - - @Override - public String describe() { - return "TimeSeriesAggregationOperator[mode=" - + mode - + ", tsHashChannel = " - + tsHashChannel - + ", timestampIntervalChannel = " - + timestampIntervalChannel - + ", timeSeriesPeriod = " - + timeSeriesPeriod - + ", maxPageSize = " - + maxPageSize - + "]"; - } - - @Override - public Operator get(DriverContext driverContext) { - BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); - return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); - } - -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index f1698ea401d28..adce8d8a88407 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; import 
org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; @@ -17,6 +18,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -89,6 +91,20 @@ public int bufferSize() { } } + public void addCompletionListener(ActionListener listener) { + buffer.addCompletionListener(ActionListener.running(() -> { + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. + pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); + } + /** * Create a new {@link ExchangeSource} for exchanging data * @@ -253,10 +269,10 @@ public Releasable addEmptySink() { private static class PendingInstances { private final AtomicInteger instances = new AtomicInteger(); - private final Releasable onComplete; + private final SubscribableListener completion = new SubscribableListener<>(); - PendingInstances(Releasable onComplete) { - this.onComplete = onComplete; + PendingInstances(Runnable onComplete) { + completion.addListener(ActionListener.running(onComplete)); } void trackNewInstance() { @@ -268,7 +284,7 @@ void finishInstance() { int refs = instances.decrementAndGet(); assert refs >= 0; if (refs == 0) { - onComplete.close(); + completion.onResponse(null); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index ae43e3954935d..86bfec5120945 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -42,9 +42,8 @@ public class BlockAccountingTests extends ComputeTestCase { public void testBooleanVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -62,9 +61,8 @@ public void testBooleanVector() { public void testIntVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); @@ -82,9 +80,8 @@ public void testIntVector() { public void testLongVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, 
RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); @@ -103,9 +100,8 @@ public void testLongVector() { public void testDoubleVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); @@ -127,9 +123,8 @@ public void testBytesRefVector() { var emptyArray = new BytesRefArray(0, blockFactory.bigArrays()); var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BytesRefVectorBlock.class - ); + long expectedEmptyVectorUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -146,9 +141,8 @@ public void testBytesRefVector() { public void testBooleanBlock() { BlockFactory blockFactory = 
blockFactory(); Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock( @@ -194,18 +188,16 @@ public void testBooleanBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock( @@ -242,18 +234,16 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, 
blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new LongArrayBlock(new long[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock( @@ -299,18 +289,16 @@ public void testLongBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, 
RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock( @@ -356,9 +344,8 @@ public void testDoubleBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 79135b12b2a83..573c960e86b9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -11,65 +11,49 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; 
-import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.hamcrest.Matcher; import org.junit.After; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; -public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { +public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { - private IndexReader reader; - private final Directory directory = newDirectory(); + private IndexReader reader = null; + private Directory directory = null; @After public void cleanup() throws IOException { IOUtils.close(reader, directory); } - @Override - protected Operator.OperatorFactory simple() { - return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + /** + * A {@link DriverContext} with a nonBreakingBigArrays. 
+ */ + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - @Override - protected Matcher expectedDescriptionOfSimple() { - return equalTo( - "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " - + "timeSeriesPeriod = 0s, maxPageSize = 100]" - ); - } - - @Override - protected Matcher expectedToStringOfSimple() { - return equalTo( - "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " - + "LongKey[channel=1]], entries=-1b}, aggregators=[]]" - ); - } - - public void testBasicRate() { + public void testBasicRate() throws Exception { long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; @@ -78,25 +62,51 @@ public void testBasicRate() { long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); - assertThat( - actualRates, - equalTo( - Map.of( - new Group("\u0001\u0003pods\u0002p1", 0), - 35.0 * unit / 111.0, - new Group("\u0001\u0003pods\u0002p2", 0), - 42.0 * unit / 13.0, - new Group("\u0001\u0003pods\u0002p3", 0), - 10.0 * unit / 20.0 - ) - ) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(2100, t1, v1)), + new Pod("p2", "cluster_1", new Interval(600, t2, v2)), + new Pod("p3", "cluster_2", new Interval(1100, t3, v3)) ); + long unit = between(1, 5); + { + List> actual = runRateTest( + pods, + List.of("cluster"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 35.0 * unit / 111.0 + 42.0 * unit / 
13.0), + List.of(new BytesRef("cluster_2"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest(pods, List.of("pod"), TimeValue.timeValueMillis(unit), TimeValue.timeValueMillis(500)); + List> expected = List.of( + List.of(new BytesRef("p1"), 35.0 * unit / 111.0), + List.of(new BytesRef("p2"), 42.0 * unit / 13.0), + List.of(new BytesRef("p3"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest( + pods, + List.of("cluster", "bucket"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 2000L, 35.0 * unit / 111.0), + List.of(new BytesRef("cluster_1"), 500L, 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 1000L, 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } } - public void testRateWithInterval() { + public void testRateWithInterval() throws Exception { long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; @@ -105,59 +115,71 @@ public void testRateWithInterval() { long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); - assertMap( - actualRates, - matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 
0), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) - .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(0, t1, v1)), + new Pod("p2", "cluster_2", new Interval(0, t2, v2)), + new Pod("p3", "cluster_2", new Interval(0, t3, v3)) + ); + List> actual = runRateTest( + pods, + List.of("pod", "bucket"), + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1) + ); + List> expected = List.of( + List.of(new BytesRef("p1"), 120_000L, 0.0D), + List.of(new BytesRef("p1"), 60_000L, 8.0E-5D), + List.of(new BytesRef("p1"), 0L, 8.0E-5D), + List.of(new BytesRef("p2"), 120_000L, 0.0D), + List.of(new BytesRef("p2"), 60_000L, 0.0D), + List.of(new BytesRef("p2"), 0L, 0.0D), + List.of(new BytesRef("p3"), 120_000L, 0.0D), + List.of(new BytesRef("p3"), 60_000L, 0.07936D), + List.of(new BytesRef("p3"), 0L, 0.00124D) ); } - public void testRandomRate() { + public void testRandomRate() throws Exception { int numPods = between(1, 10); List pods = new ArrayList<>(); - Map expectedRates = new HashMap<>(); TimeValue unit = TimeValue.timeValueSeconds(1); + List> expected = new ArrayList<>(); for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; + int numIntervals = randomIntBetween(1, 3); + Interval[] intervals = new Interval[numIntervals]; + long startTimeInHours = between(10, 100); + String podName = "p" + p; + for (int interval = 0; interval < numIntervals; interval++) { + final long startInterval = TimeValue.timeValueHours(--startTimeInHours).millis(); + int numValues = between(2, 100); + long[] values = new
long[numValues]; + long[] times = new long[numValues]; + long delta = 0; + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + delta += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = delta; + } + intervals[interval] = new Interval(startInterval, times, values); + if (numValues == 1) { + expected.add(List.of(new BytesRef(podName), startInterval, null)); + } else { + expected.add(List.of(new BytesRef(podName), startInterval, intervals[interval].expectedRate(unit))); + } } - Pod pod = new Pod("p" + p, times, values); + Pod pod = new Pod(podName, "cluster", intervals); pods.add(pod); - if (numValues == 1) { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); - } else { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); - } } - Map actualRates = runRateTest(pods, unit, TimeValue.ZERO); - assertThat(actualRates, equalTo(expectedRates)); + List> actual = runRateTest(pods, List.of("pod", "bucket"), unit, TimeValue.timeValueHours(1)); + assertThat(actual, equalTo(expected)); } - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - + record Interval(long offset, long[] times, long[] values) { double expectedRate(TimeValue unit) { double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; + for (int v = 0; v < values.length - 1; v++) { + if (values[v + 1] < values[v]) { + dv += values[v]; } } dv += (values[values.length - 1] - values[0]); @@ -166,9 +188,13 @@ record Pod(String name, long[] times, long[] values) { } } - Map runRateTest(List pods, TimeValue unit, TimeValue interval) { + record Pod(String name, String cluster, Interval... 
intervals) {} + + List> runRateTest(List pods, List groupings, TimeValue unit, TimeValue bucketInterval) throws IOException { + cleanup(); + directory = newDirectory(); long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { + record Doc(String pod, String cluster, long timestamp, long requests) { } var sourceOperatorFactory = createTimeSeriesSourceOperator( @@ -177,70 +203,114 @@ record Doc(String pod, long timestamp, long requests) { Integer.MAX_VALUE, between(1, 100), randomBoolean(), - interval, + bucketInterval, writer -> { List docs = new ArrayList<>(); for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + for (Interval interval : pod.intervals) { + for (int i = 0; i < interval.times.length; i++) { + docs.add(new Doc(pod.name, pod.cluster, interval.offset + interval.times[i], interval.values[i])); + } } } Randomness.shuffle(docs); for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + writeTS( + writer, + doc.timestamp, + new Object[] { "pod", doc.pod, "cluster", doc.cluster }, + new Object[] { "requests", doc.requests } + ); } return docs.size(); } ); var ctx = driverContext(); - var aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ); - Operator initialHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.INITIAL, + List extractOperators = new ArrayList<>(); + var rateField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + Operator extractRate = (ValuesSourceReaderOperatorTests.factory(reader, rateField, ElementType.LONG).get(ctx)); + extractOperators.add(extractRate); + List nonBucketGroupings = new ArrayList<>(groupings); + nonBucketGroupings.remove("bucket"); + for (String grouping : nonBucketGroupings) { + var 
groupingField = new KeywordFieldMapper.KeywordFieldType(grouping); + extractOperators.add(ValuesSourceReaderOperatorTests.factory(reader, groupingField, ElementType.BYTES_REF).get(ctx)); + } + // _doc, tsid, timestamp, bucket, requests, grouping1, grouping2 + Operator intialAgg = new TimeSeriesAggregationOperatorFactories.Initial( 1, 3, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); - aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) - ); - Operator finalHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.FINAL, + // tsid, bucket, rate[0][0],rate[0][1],rate[0][2], grouping1, grouping2 + Operator intermediateAgg = new TimeSeriesAggregationOperatorFactories.Intermediate( 0, 1, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); + // tsid, bucket, rate, grouping1, grouping2 + List finalGroups = new ArrayList<>(); + int groupChannel = 3; + for (String grouping : groupings) { + if (grouping.equals("bucket")) { + finalGroups.add(new BlockHash.GroupSpec(1, ElementType.LONG)); + } else { + finalGroups.add(new BlockHash.GroupSpec(groupChannel++, ElementType.BYTES_REF)); + } + } + Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( + finalGroups, + List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(), + between(1, 100) + ).get(ctx); + List results = new ArrayList<>(); - var requestsField = new 
NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( ctx, sourceOperatorFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + CollectionUtils.concatLists(extractOperators, List.of(intialAgg, intermediateAgg, finalAgg)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - Map rates = new HashMap<>(); + List> values = new ArrayList<>(); for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - LongBlock timestampIntervalsBock = result.getBlock(1); - DoubleBlock ratesBlock = result.getBlock(2); - for (int i = 0; i < result.getPositionCount(); i++) { - var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); - rates.put(key, ratesBlock.getDouble(i)); + for (int p = 0; p < result.getPositionCount(); p++) { + int blockCount = result.getBlockCount(); + List row = new ArrayList<>(); + for (int b = 0; b < blockCount; b++) { + row.add(BlockUtils.toJavaObject(result.getBlock(b), p)); + } + values.add(row); } result.releaseBlocks(); } - return rates; + values.sort((v1, v2) -> { + for (int i = 0; i < v1.size(); i++) { + if (v1.get(i) instanceof BytesRef b1) { + int cmp = b1.compareTo((BytesRef) v2.get(i)); + if (cmp != 0) { + return cmp; + } + } else if (v1.get(i) instanceof Long b1) { + int cmp = b1.compareTo((Long) v2.get(i)); + if (cmp != 0) { + return -cmp; + } + } + } + return 0; + }); + return values; } - - record Group(String tsidHash, long timestampInterval) {} } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index bdaa045633dc0..51332b3c8997a 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -55,6 +55,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -94,6 +95,8 @@ public void testBasic() throws Exception { ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletion = new PlainActionFuture<>(); + sourceExchanger.addCompletionListener(sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); SubscribableListener waitForReading = source.waitForReading(); @@ -133,7 +136,9 @@ public void testBasic() throws Exception { sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); + assertFalse(sourceCompletion.isDone()); source.finish(); + sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); @@ -320,7 +325,9 @@ protected void start(Driver driver, ActionListener listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + sourceExchanger.addCompletionListener(sourceCompletionFuture); List sinkHandlers = new ArrayList<>(); Supplier exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -336,6 +343,7 @@ public void 
testConcurrentWithHandlers() { final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } public void testEarlyTerminate() { @@ -358,7 +366,7 @@ public void testEarlyTerminate() { assertTrue(sink.isFinished()); } - public void testConcurrentWithTransportActions() throws Exception { + public void testConcurrentWithTransportActions() { MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); @@ -371,12 +379,15 @@ public void testConcurrentWithTransportActions() throws Exception { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -427,6 +438,8 @@ public void sendResponse(TransportResponse transportResponse) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); @@ -438,6 +451,7 @@ public void sendResponse(TransportResponse transportResponse) { assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); sinkHandler.onFailure(new RuntimeException(cause)); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 4f43e54a82546..072dc5265fe60 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -126,6 +126,7 @@ public void testDoNotLogWithInfo() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws 
IOException { try { setLoggingLevel("DEBUG"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec similarity index 88% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 6ddc9601db4ac..64c4641b2ca01 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,4 +1,7 @@ +# Examples that were published in a blog post + 2023-08-08.full-blown-query +required_feature: esql.enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 721cff076aeaa..8d54288de552d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -621,6 +621,40 @@ dt:datetime |plus_post:datetime |plus_pre:datetime 2100-01-01T01:01:01.001Z |null |null ; +datePlusQuarter +# "quarter" introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T01:01:01.000Z") +| eval plusQuarter = dt + 2 quarters +; + +dt:datetime | plusQuarter:datetime +2100-01-01T01:01:01.000Z | 2100-07-01T01:01:01.000Z +; + +datePlusAbbreviatedDurations +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2100-01-01T01:02:03.004Z +; + +datePlusAbbreviatedPeriods +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval 
plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2101-11-01T00:00:00.000Z +; + + dateMinusDuration row dt = to_dt("2100-01-01T01:01:01.001Z") | eval minus = dt - 1 hour - 1 minute - 1 second - 1 milliseconds; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec deleted file mode 100644 index 367fbf044deed..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ /dev/null @@ -1,350 +0,0 @@ -simple -row language_code = "1" -| enrich languages_policy -; - -language_code:keyword | language_name:keyword -1 | English -; - - -enrichOn -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; - -emp_no:integer | language_name:keyword -10001 | French -; - - -enrichOn2 -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; - -emp_no:integer | language_name:keyword -10001 | French -; - -simpleSortLimit -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; - -emp_no:integer | language_name:keyword -10001 | French -; - - -with -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 -| enrich languages_policy on x with language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -withAlias -from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasSort -from employees | eval x = to_string(languages) | keep emp_no, x | sort 
emp_no | limit 3 -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasOverwriteName#[skip:-8.13.0] -from employees | sort emp_no -| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name -| keep emp_no | limit 1 -; - -emp_no:keyword -French -; - - -withAliasAndPlain -from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, language_name; - -emp_no:integer | x:keyword | lang:keyword | language_name:keyword -10100 | 4 | German | German -10099 | 2 | French | French -10098 | 4 | German | German -; - - -withTwoAliasesSameProp -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, lang2 = language_name; - -emp_no:integer | x:keyword | lang:keyword | lang2:keyword -10001 | 2 | French | French -; - - -redundantWith -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -nullInput -from employees | where emp_no == 10017 | keep emp_no, gender -| enrich languages_policy on gender with language_name, language_name; - -emp_no:integer | gender:keyword | language_name:keyword -10017 | null | null -; - - -constantNullInput -from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10020 | null | null -; - - -multipleEnrich -row a = "1", b = "2", c = "10" -| enrich languages_policy on a with a_lang = language_name -| enrich languages_policy on b with b_lang = language_name -| enrich languages_policy on c with 
c_lang = language_name; - -a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword -1 | 2 | 10 | English | French | null -; - - -enrichEval -from employees | eval x = to_string(languages) -| enrich languages_policy on x with lang = language_name -| eval language = concat(x, "-", lang) -| keep emp_no, x, lang, language -| sort emp_no desc | limit 3; - -emp_no:integer | x:keyword | lang:keyword | language:keyword -10100 | 4 | German | 4-German -10099 | 2 | French | 2-French -10098 | 4 | German | 4-German -; - - -multivalue -required_feature: esql.mv_sort -row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); - -a:keyword | a_lang:keyword -["1", "2"] | ["English", "French"] -; - - -enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env -| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) -| KEEP client_ip, count_env, max_env -| SORT client_ip -; - -client_ip:ip | count_env:i | max_env:keyword -172.21.0.5 | 1 | Development -172.21.2.113 | 2 | QA -172.21.2.162 | 2 | QA -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -; - - -enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr -| KEEP client_ip, env, client_cidr -| SORT client_ip -; - -client_ip:ip | env:keyword | client_cidr:ip_range -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.0.5 | Development | 172.21.0.0/16 -172.21.2.113 | [Development, QA] | 172.21.2.0/24 -172.21.2.162 | [Development, QA] | 172.21.2.0/24 -; - - -enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM 
employees -| WHERE birth_date > "1960-01-01" -| EVAL birth_year = DATE_EXTRACT("year", birth_date) -| EVAL age = 2022 - birth_year -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group, birth_year -| KEEP birth_year, age_group, count -| SORT birth_year DESC -; - -birth_year:long | age_group:keyword | count:long -1965 | Middle-aged | 1 -1964 | Middle-aged | 4 -1963 | Middle-aged | 7 -1962 | Senior | 6 -1961 | Senior | 8 -1960 | Senior | 8 -; - - -enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date IS NOT NULL -| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group -| SORT count DESC -; - -count:long | age_group:keyword -78 | Senior -12 | Middle-aged -; - - -enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH heights_policy ON height WITH height_group = description -| STATS count=count(height_group), min=min(height), max=max(height) BY height_group -| KEEP height_group, min, max, count -| SORT min ASC -; - -height_group:k | min:double | max:double | count:long -Very Short | 1.41 | 1.48 | 9 -Short | 1.5 | 1.59 | 20 -Medium Height | 1.61 | 1.79 | 26 -Tall | 1.8 | 1.99 | 25 -Very Tall | 2.0 | 2.1 | 20 -; - - -enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description -| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description -| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description -| KEEP birth_decade, hire_decade, birth_description, hire_description, count -| SORT birth_decade DESC, hire_decade DESC -; - -birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long -null | 1990 | null | Nineties 
Nostalgia | 6 -null | 1980 | null | Radical Eighties | 4 -1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 -1960 | 1980 | Swinging Sixties | Radical Eighties | 21 -1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 -1950 | 1980 | Nifty Fifties | Radical Eighties | 34 -; - - -spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_names ON city WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.mv_warn - -FROM airports -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID_AGG(city_location), count = 
COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) -; -warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. -warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - -city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer -POINT(1.396561 24.127649) | 872 | 88 | 1044 -; - - -spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*) BY airport_in_city -| SORT count ASC -; - -count:long | airport_in_city:boolean -114 | null -396 | true -455 | false -; - - -spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city -| SORT count ASC -; - -count:long | centroid:geo_point | airport_in_city:boolean -114 | POINT (-24.750062 31.575549) | null -396 | POINT (-2.534797 20.667712) | true -455 | POINT (3.090752 27.676442) | false -; - - -spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "IDR" -| ENRICH city_airports ON name WITH city_name = city, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length -; - -abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i -IDR | Indore | POINT(75.8472 22.7167) | India | 
POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f5847260bbb16..e84e79748c179 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -1,10 +1,10 @@ -simple +simpleNoLoad from employees | eval x = 1, y = to_string(languages) | enrich languages_policy on y | where x > 1 | keep emp_no, language_name | limit 1; emp_no:integer | language_name:keyword ; -docsGettingStartedEnrich +docsGettingStartedEnrichNoLoad // tag::gs-enrich[] FROM sample_data | KEEP @timestamp, client_ip, event_duration @@ -30,3 +30,408 @@ FROM sample_data median_duration:double | env:keyword ; + +simple +required_feature: esql.enrich_load + +row language_code = "1" +| enrich languages_policy +; + +language_code:keyword | language_name:keyword +1 | English +; + + +enrichOn +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; + +emp_no:integer | language_name:keyword +10001 | French +; + + +enrichOn2 +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; + +emp_no:integer | language_name:keyword +10001 | French +; + + +simpleSortLimit +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; + +emp_no:integer | language_name:keyword +10001 | French +; + +with +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 +| enrich languages_policy on x with language_name; + +emp_no:integer | 
x:keyword | language_name:keyword +10001 | 2 | French +; + + +withAlias +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasSort +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasOverwriteName#[skip:-8.13.0] +required_feature: esql.enrich_load + +from employees | sort emp_no +| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name +| keep emp_no | limit 1 +; + +emp_no:keyword +French +; + +withAliasAndPlain +required_feature: esql.enrich_load + +from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, language_name; + +emp_no:integer | x:keyword | lang:keyword | language_name:keyword +10100 | 4 | German | German +10099 | 2 | French | French +10098 | 4 | German | German +; + + +withTwoAliasesSameProp +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, lang2 = language_name; + +emp_no:integer | x:keyword | lang:keyword | lang2:keyword +10001 | 2 | French | French +; + + +redundantWith +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +nullInput +required_feature: esql.enrich_load + +from 
employees | where emp_no == 10017 | keep emp_no, gender +| enrich languages_policy on gender with language_name, language_name; + +emp_no:integer | gender:keyword | language_name:keyword +10017 | null | null +; + + +constantNullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10020 | null | null +; + + +multipleEnrich +required_feature: esql.enrich_load + +row a = "1", b = "2", c = "10" +| enrich languages_policy on a with a_lang = language_name +| enrich languages_policy on b with b_lang = language_name +| enrich languages_policy on c with c_lang = language_name; + +a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword +1 | 2 | 10 | English | French | null +; + + +enrichEval +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) +| enrich languages_policy on x with lang = language_name +| eval language = concat(x, "-", lang) +| keep emp_no, x, lang, language +| sort emp_no desc | limit 3; + +emp_no:integer | x:keyword | lang:keyword | language:keyword +10100 | 4 | German | 4-German +10099 | 2 | French | 2-French +10098 | 4 | German | 4-German +; + + +multivalue +required_feature: esql.enrich_load +required_feature: esql.mv_sort + +row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); + +a:keyword | a_lang:keyword +["1", "2"] | ["English", "French"] +; + + +enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip +; + +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA 
+172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +; + + +enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr +| KEEP client_ip, env, client_cidr +| SORT client_ip +; + +client_ip:ip | env:keyword | client_cidr:ip_range +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.0.5 | Development | 172.21.0.0/16 +172.21.2.113 | [Development, QA] | 172.21.2.0/24 +172.21.2.162 | [Development, QA] | 172.21.2.0/24 +; + + +enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date > "1960-01-01" +| EVAL birth_year = DATE_EXTRACT("year", birth_date) +| EVAL age = 2022 - birth_year +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group, birth_year +| KEEP birth_year, age_group, count +| SORT birth_year DESC +; + +birth_year:long | age_group:keyword | count:long +1965 | Middle-aged | 1 +1964 | Middle-aged | 4 +1963 | Middle-aged | 7 +1962 | Senior | 6 +1961 | Senior | 8 +1960 | Senior | 8 +; + + +enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date IS NOT NULL +| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group +| SORT count DESC +; + +count:long | age_group:keyword +78 | Senior +12 | Middle-aged +; + + +enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH heights_policy 
ON height WITH height_group = description +| STATS count=count(height_group), min=min(height), max=max(height) BY height_group +| KEEP height_group, min, max, count +| SORT min ASC +; + +height_group:k | min:double | max:double | count:long +Very Short | 1.41 | 1.48 | 9 +Short | 1.5 | 1.59 | 20 +Medium Height | 1.61 | 1.79 | 26 +Tall | 1.8 | 1.99 | 25 +Very Tall | 2.0 | 2.1 | 20 +; + + +enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description +| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description +| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description +| KEEP birth_decade, hire_decade, birth_description, hire_description, count +| SORT birth_decade DESC, hire_decade DESC +; + +birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long +null | 1990 | null | Nineties Nostalgia | 6 +null | 1980 | null | Radical Eighties | 4 +1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 +1960 | 1980 | Swinging Sixties | Radical Eighties | 21 +1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 +1950 | 1980 | Nifty Fifties | Radical Eighties | 34 +; + + +spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_names ON city WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen 
| Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load +required_feature: esql.mv_warn + +FROM airports +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +; +warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value + +city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer +POINT(1.396561 24.127649) | 872 | 88 | 1044 +; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git 
a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index c4a3dc7c56615..f16afa86199f9 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -201,10 +201,6 @@ FROM_UNQUOTED_IDENTIFIER : FROM_UNQUOTED_IDENTIFIER_PART+ ; -FROM_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) - ; - FROM_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 62dcc6ebd484b..e30bc83595942 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -109,7 +109,6 @@ fromCommand fromIdentifier : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER ; fromOptions diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index e9a2fb88e1991..84993a96f040d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -474,7 +474,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(matchType); out.writeString(matchField); out.writeWriteable(inputPage); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, null); planOut.writeCollection(extractFields, writerFromPlanWriter(PlanStreamOutput::writeNamedExpression)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index 
6dc859afe37e3..55dff823806d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -145,7 +145,13 @@ public static Part resolve(String dateTimeUnit) { | millisecond | milliseconds, ms | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns - |===""", examples = @Example(file = "date", tag = "docsDateDiff")) + |=== + + Note that while there is an overlap between the function's supported units and + {esql}'s supported time span literals, these sets are distinct and not + interchangeable. Similarly, the supported abbreviations are conveniently shared + with implementations of this function in other established products and not + necessarily common with the date-time nomenclature used by {es}.""", examples = @Example(file = "date", tag = "docsDateDiff")) public DateDiff( Source source, @Param(name = "unit", type = { "keyword", "text" }, description = "Time difference unit") Expression unit, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 046e46d216bdc..93bd2518ae380 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -7,10 +7,23 @@ package org.elasticsearch.xpack.esql.io.stream; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; +import 
org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -60,6 +73,8 @@ public NameId apply(long streamNameId) { private static final Supplier> DEFAULT_NAME_ID_FUNC = NameIdMapper::new; + private final Map cachedBlocks = new HashMap<>(); + private final PlanNameRegistry registry; // hook for nameId, where can cache and map, for now just return a NameId of the same long value. @@ -180,6 +195,79 @@ public EsqlConfiguration configuration() throws IOException { return configuration; } + /** + * Read a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block readCachedBlock() throws IOException { + byte key = readByte(); + Block block = switch (key) { + case PlanStreamOutput.NEW_BLOCK_KEY -> { + int id = readVInt(); + // TODO track blocks read over the wire.... Or slice them from BigArrays? Something. + Block b = new BlockStreamInput( + this, + new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) + ).readNamedWriteable(Block.class); + cachedBlocks.put(id, b); + yield b; + } + case PlanStreamOutput.FROM_PREVIOUS_KEY -> cachedBlocks.get(readVInt()); + case PlanStreamOutput.FROM_CONFIG_KEY -> { + String tableName = readString(); + Map table = configuration.tables().get(tableName); + if (table == null) { + throw new IOException("can't find table [" + tableName + "]"); + } + String columnName = readString(); + Column column = table.get(columnName); + if (column == null) { + throw new IOException("can't find column[" + columnName + "]"); + } + yield column.values(); + } + default -> throw new IOException("invalid encoding for Block"); + }; + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + return block; + } + + /** + * Read an array of {@link Block}s as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block[] readCachedBlockArray() throws IOException { + int len = readArraySize(); + if (len == 0) { + return BlockUtils.NO_BLOCKS; + } + Block[] blocks = new Block[len]; + try { + for (int i = 0; i < blocks.length; i++) { + blocks[i] = readCachedBlock(); + } + return blocks; + } finally { + if (blocks[blocks.length - 1] == null) { + // Wasn't successful reading all blocks + Releasables.closeExpectNoException(blocks); + } + } + } + static void throwOnNullOptionalRead(Class type) throws IOException { final IOException e = new IOException("read optional named returned null which is not allowed, type:" + type); assert false : e; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 5ee292b6add9e..d78e004aade31 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -8,9 +8,20 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; 
import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.NamedExpression; @@ -18,6 +29,8 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.io.IOException; +import java.util.IdentityHashMap; +import java.util.Map; import java.util.function.Function; import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSourceNoText; @@ -28,19 +41,42 @@ */ public final class PlanStreamOutput extends StreamOutput { + /** + * Cache of written blocks. We use an {@link IdentityHashMap} for this + * because calculating the {@link Object#hashCode} of a {@link Block} + * is slow. And so is {@link Object#equals}. So, instead we just use + * object identity. + */ + private final Map cachedBlocks = new IdentityHashMap<>(); + private final StreamOutput delegate; private final PlanNameRegistry registry; private final Function, String> nameSupplier; - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry) { - this(delegate, registry, PlanNamedTypes::name); + private int nextCachedBlock = 0; + + public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, @Nullable EsqlConfiguration configuration) + throws IOException { + this(delegate, registry, configuration, PlanNamedTypes::name); } - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, Function, String> nameSupplier) { + public PlanStreamOutput( + StreamOutput delegate, + PlanNameRegistry registry, + @Nullable EsqlConfiguration configuration, + Function, String> nameSupplier + ) throws IOException { this.delegate = delegate; this.registry = registry; this.nameSupplier = nameSupplier; + if (configuration != null) { + for (Map.Entry> table : configuration.tables().entrySet()) { + for (Map.Entry column : table.getValue().entrySet()) { + cachedBlocks.put(column.getValue().values(), fromConfigKey(table.getKey(), column.getKey())); + } + } + } } public void 
writeLogicalPlanNode(LogicalPlan logicalPlan) throws IOException { @@ -130,4 +166,86 @@ public void setTransportVersion(TransportVersion version) { delegate.setTransportVersion(version); super.setTransportVersion(version); } + + /** + * Write a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public void writeCachedBlock(Block block) throws IOException { + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + BytesReference key = cachedBlocks.get(block); + if (key != null) { + key.writeTo(this); + return; + } + writeByte(NEW_BLOCK_KEY); + writeVInt(nextCachedBlock); + cachedBlocks.put(block, fromPreviousKey(nextCachedBlock)); + writeNamedWriteable(block); + nextCachedBlock++; + } + + /** + * The byte representing a {@link Block} sent for the first time. The byte + * will be followed by a {@link StreamOutput#writeVInt} encoded identifier + * and then the contents of the {@linkplain Block} will immediately follow + * this byte. + */ + static final byte NEW_BLOCK_KEY = 0; + + /** + * The byte representing a {@link Block} that has previously been sent. + * This byte will be followed up a {@link StreamOutput#writeVInt} encoded + * identifier pointing to the block to read. + */ + static final byte FROM_PREVIOUS_KEY = 1; + + /** + * The byte representing a {@link Block} that was part of the + * {@link EsqlConfiguration#tables()} map. It is followed a string for + * the table name and then a string for the column name. + */ + static final byte FROM_CONFIG_KEY = 2; + + /** + * Build the key for reading a {@link Block} from the cache of previously + * received {@linkplain Block}s. + */ + static BytesReference fromPreviousKey(int id) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_PREVIOUS_KEY); + key.writeVInt(id); + return key.bytes(); + } + } + + /** + * Build the key for reading a {@link Block} from the {@link EsqlConfiguration}. 
+ * This is important because some operations like {@code LOOKUP} frequently read + * {@linkplain Block}s directly from the configuration. + *

+ * It'd be possible to implement this by adding all of the Blocks as "previous" + * keys in the constructor and never use this construct at all, but that'd + * require there be a consistent ordering of Blocks there. We could make one, + * but I'm afraid that'd be brittle as we evolve the code. It'd make wire + * compatibility difficult. This signal is much simpler to deal with even though + * it is more bytes over the wire. + *

+ */ + static BytesReference fromConfigKey(String table, String column) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_CONFIG_KEY); + key.writeString(table); + key.writeString(column); + return key.bytes(); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 866093ef55a6c..d6ad79586fa79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -319,7 +319,6 @@ OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER_PART FROM_UNQUOTED_IDENTIFIER -FROM_QUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -405,4 +404,4 @@ META_MODE SETTING_MODE atn: -[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 
71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 
1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 
47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 
1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 
1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 
0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 
246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 
715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 
0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 0, 0, 316, 1167, 1, 0, 0, 0, 318, 1171, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 
27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 411, 5, 108, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 
488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 
0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 0, 578, 580, 5, 96, 0, 0, 579, 576, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 
645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 655, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 
726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 733, 734, 5, 117, 0, 0, 734, 735, 5, 108, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 
812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 
893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 
970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 978, 979, 5, 97, 0, 0, 979, 980, 5, 115, 0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 
0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 12, 0, 1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 
6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 0, 1136, 1137, 3, 50, 19, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 
2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 880, 885, 920, 924, 929, 934, 939, 941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file +[4, 0, 110, 1197, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 
97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 
18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 510, 8, 20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 
61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 796, 8, 78, 10, 78, 12, 78, 799, 9, 78, 1, 78, 1, 78, 3, 78, 803, 8, 78, 1, 78, 4, 78, 806, 8, 78, 11, 78, 12, 78, 807, 3, 78, 810, 8, 78, 1, 79, 1, 79, 4, 79, 814, 8, 79, 11, 79, 12, 79, 815, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 879, 8, 92, 1, 93, 4, 93, 882, 8, 93, 11, 93, 12, 93, 883, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 915, 8, 100, 1, 101, 1, 101, 3, 101, 919, 8, 101, 1, 101, 5, 101, 922, 8, 101, 10, 101, 12, 101, 925, 9, 101, 1, 101, 1, 101, 3, 101, 929, 8, 101, 1, 101, 4, 101, 932, 8, 101, 11, 101, 12, 101, 933, 3, 101, 936, 8, 101, 1, 102, 1, 102, 4, 102, 940, 8, 102, 11, 102, 12, 102, 941, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 
116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1017, 8, 120, 11, 120, 12, 120, 1018, 1, 120, 1, 120, 3, 120, 1023, 8, 120, 1, 120, 4, 120, 1026, 8, 120, 11, 120, 12, 120, 1027, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1182, 8, 155, 11, 155, 12, 155, 1183, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 
102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 75, 202, 76, 204, 77, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 78, 218, 79, 220, 80, 222, 81, 224, 0, 226, 0, 228, 0, 230, 0, 232, 82, 234, 0, 236, 83, 238, 84, 240, 85, 242, 0, 244, 0, 246, 86, 248, 87, 250, 0, 252, 88, 254, 0, 256, 0, 258, 89, 260, 90, 262, 91, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 92, 280, 93, 282, 94, 284, 0, 286, 0, 288, 0, 290, 0, 292, 95, 294, 96, 296, 97, 298, 0, 300, 98, 302, 99, 304, 100, 306, 101, 308, 0, 310, 102, 312, 103, 314, 104, 316, 105, 318, 0, 320, 106, 322, 107, 324, 108, 326, 109, 328, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1224, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 
0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 
290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 682, 1, 0, 0, 0, 106, 684, 1, 0, 0, 0, 108, 689, 1, 0, 0, 0, 110, 691, 1, 0, 0, 0, 112, 697, 1, 0, 0, 0, 114, 703, 1, 0, 0, 0, 116, 708, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 716, 1, 0, 0, 0, 124, 721, 1, 0, 0, 0, 126, 725, 1, 0, 0, 0, 128, 730, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 739, 1, 0, 0, 0, 134, 741, 1, 0, 0, 0, 136, 747, 1, 0, 0, 0, 138, 749, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 763, 1, 0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 768, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 773, 1, 0, 0, 0, 156, 775, 1, 0, 0, 0, 158, 777, 1, 0, 0, 0, 160, 
779, 1, 0, 0, 0, 162, 781, 1, 0, 0, 0, 164, 783, 1, 0, 0, 0, 166, 788, 1, 0, 0, 0, 168, 809, 1, 0, 0, 0, 170, 811, 1, 0, 0, 0, 172, 819, 1, 0, 0, 0, 174, 821, 1, 0, 0, 0, 176, 825, 1, 0, 0, 0, 178, 829, 1, 0, 0, 0, 180, 833, 1, 0, 0, 0, 182, 838, 1, 0, 0, 0, 184, 842, 1, 0, 0, 0, 186, 846, 1, 0, 0, 0, 188, 850, 1, 0, 0, 0, 190, 854, 1, 0, 0, 0, 192, 858, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 881, 1, 0, 0, 0, 200, 885, 1, 0, 0, 0, 202, 889, 1, 0, 0, 0, 204, 893, 1, 0, 0, 0, 206, 897, 1, 0, 0, 0, 208, 902, 1, 0, 0, 0, 210, 906, 1, 0, 0, 0, 212, 914, 1, 0, 0, 0, 214, 935, 1, 0, 0, 0, 216, 939, 1, 0, 0, 0, 218, 943, 1, 0, 0, 0, 220, 947, 1, 0, 0, 0, 222, 951, 1, 0, 0, 0, 224, 955, 1, 0, 0, 0, 226, 960, 1, 0, 0, 0, 228, 964, 1, 0, 0, 0, 230, 968, 1, 0, 0, 0, 232, 972, 1, 0, 0, 0, 234, 975, 1, 0, 0, 0, 236, 979, 1, 0, 0, 0, 238, 983, 1, 0, 0, 0, 240, 987, 1, 0, 0, 0, 242, 991, 1, 0, 0, 0, 244, 996, 1, 0, 0, 0, 246, 1001, 1, 0, 0, 0, 248, 1006, 1, 0, 0, 0, 250, 1013, 1, 0, 0, 0, 252, 1022, 1, 0, 0, 0, 254, 1029, 1, 0, 0, 0, 256, 1033, 1, 0, 0, 0, 258, 1037, 1, 0, 0, 0, 260, 1041, 1, 0, 0, 0, 262, 1045, 1, 0, 0, 0, 264, 1049, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1059, 1, 0, 0, 0, 270, 1063, 1, 0, 0, 0, 272, 1067, 1, 0, 0, 0, 274, 1071, 1, 0, 0, 0, 276, 1075, 1, 0, 0, 0, 278, 1079, 1, 0, 0, 0, 280, 1083, 1, 0, 0, 0, 282, 1087, 1, 0, 0, 0, 284, 1091, 1, 0, 0, 0, 286, 1096, 1, 0, 0, 0, 288, 1100, 1, 0, 0, 0, 290, 1104, 1, 0, 0, 0, 292, 1108, 1, 0, 0, 0, 294, 1112, 1, 0, 0, 0, 296, 1116, 1, 0, 0, 0, 298, 1120, 1, 0, 0, 0, 300, 1125, 1, 0, 0, 0, 302, 1130, 1, 0, 0, 0, 304, 1134, 1, 0, 0, 0, 306, 1138, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1147, 1, 0, 0, 0, 312, 1157, 1, 0, 0, 0, 314, 1161, 1, 0, 0, 0, 316, 1165, 1, 0, 0, 0, 318, 1169, 1, 0, 0, 0, 320, 1174, 1, 0, 0, 0, 322, 1181, 1, 0, 0, 0, 324, 1185, 1, 0, 0, 0, 326, 1189, 1, 0, 0, 0, 328, 1193, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 
0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 342, 343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 
421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 5, 112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 
0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 164, 76, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 
87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 108, 48, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 108, 48, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 108, 48, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 108, 48, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 
661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 58, 0, 0, 680, 681, 5, 58, 0, 0, 681, 103, 1, 0, 0, 0, 682, 683, 5, 44, 0, 0, 683, 105, 1, 0, 0, 0, 684, 685, 5, 100, 0, 0, 685, 686, 5, 101, 0, 0, 686, 687, 5, 115, 0, 0, 687, 688, 5, 99, 0, 0, 688, 107, 1, 0, 0, 0, 689, 690, 5, 46, 0, 0, 690, 109, 1, 0, 0, 0, 691, 692, 5, 102, 0, 0, 692, 693, 5, 97, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 115, 0, 0, 695, 696, 5, 101, 0, 0, 696, 111, 1, 0, 0, 0, 697, 698, 5, 102, 0, 0, 698, 699, 5, 105, 0, 0, 699, 700, 5, 114, 0, 0, 700, 701, 5, 115, 0, 0, 701, 702, 5, 116, 0, 0, 702, 113, 1, 0, 0, 0, 703, 704, 5, 108, 0, 0, 704, 705, 5, 97, 0, 0, 705, 706, 5, 115, 0, 0, 706, 707, 5, 116, 0, 0, 707, 115, 1, 0, 0, 0, 708, 709, 5, 40, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 110, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 105, 0, 0, 714, 715, 5, 115, 0, 0, 715, 121, 1, 0, 0, 0, 716, 717, 5, 108, 0, 0, 717, 718, 5, 105, 0, 0, 718, 719, 5, 107, 0, 0, 719, 720, 5, 101, 0, 0, 720, 123, 1, 0, 0, 0, 721, 722, 5, 110, 0, 0, 722, 723, 5, 111, 0, 0, 723, 724, 5, 116, 0, 0, 724, 125, 1, 0, 0, 0, 725, 726, 5, 110, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 108, 0, 0, 728, 729, 5, 108, 0, 0, 729, 127, 1, 0, 0, 0, 730, 731, 5, 110, 0, 0, 731, 732, 5, 117, 0, 0, 732, 733, 5, 108, 0, 0, 733, 734, 5, 108, 0, 0, 734, 735, 5, 115, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 111, 0, 0, 737, 738, 5, 114, 0, 0, 738, 131, 1, 0, 0, 0, 739, 740, 5, 63, 0, 0, 740, 133, 1, 0, 0, 0, 741, 742, 5, 114, 0, 0, 742, 743, 5, 108, 0, 0, 743, 744, 5, 
105, 0, 0, 744, 745, 5, 107, 0, 0, 745, 746, 5, 101, 0, 0, 746, 135, 1, 0, 0, 0, 747, 748, 5, 41, 0, 0, 748, 137, 1, 0, 0, 0, 749, 750, 5, 116, 0, 0, 750, 751, 5, 114, 0, 0, 751, 752, 5, 117, 0, 0, 752, 753, 5, 101, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 61, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 61, 0, 0, 758, 759, 5, 126, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 33, 0, 0, 761, 762, 5, 61, 0, 0, 762, 145, 1, 0, 0, 0, 763, 764, 5, 60, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 767, 5, 61, 0, 0, 767, 149, 1, 0, 0, 0, 768, 769, 5, 62, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 772, 5, 61, 0, 0, 772, 153, 1, 0, 0, 0, 773, 774, 5, 43, 0, 0, 774, 155, 1, 0, 0, 0, 775, 776, 5, 45, 0, 0, 776, 157, 1, 0, 0, 0, 777, 778, 5, 42, 0, 0, 778, 159, 1, 0, 0, 0, 779, 780, 5, 47, 0, 0, 780, 161, 1, 0, 0, 0, 781, 782, 5, 37, 0, 0, 782, 163, 1, 0, 0, 0, 783, 784, 5, 91, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, 6, 76, 0, 0, 786, 787, 6, 76, 0, 0, 787, 165, 1, 0, 0, 0, 788, 789, 5, 93, 0, 0, 789, 790, 1, 0, 0, 0, 790, 791, 6, 77, 13, 0, 791, 792, 6, 77, 13, 0, 792, 167, 1, 0, 0, 0, 793, 797, 3, 70, 29, 0, 794, 796, 3, 86, 37, 0, 795, 794, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 810, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 800, 803, 3, 84, 36, 0, 801, 803, 3, 78, 33, 0, 802, 800, 1, 0, 0, 0, 802, 801, 1, 0, 0, 0, 803, 805, 1, 0, 0, 0, 804, 806, 3, 86, 37, 0, 805, 804, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 810, 1, 0, 0, 0, 809, 793, 1, 0, 0, 0, 809, 802, 1, 0, 0, 0, 810, 169, 1, 0, 0, 0, 811, 813, 3, 80, 34, 0, 812, 814, 3, 82, 35, 0, 813, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 813, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 3, 80, 34, 0, 818, 171, 1, 0, 0, 0, 819, 820, 3, 170, 79, 0, 820, 173, 1, 0, 0, 0, 821, 822, 3, 50, 19, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 81, 9, 0, 824, 175, 1, 0, 0, 0, 825, 826, 3, 52, 20, 0, 826, 827, 
1, 0, 0, 0, 827, 828, 6, 82, 9, 0, 828, 177, 1, 0, 0, 0, 829, 830, 3, 54, 21, 0, 830, 831, 1, 0, 0, 0, 831, 832, 6, 83, 9, 0, 832, 179, 1, 0, 0, 0, 833, 834, 3, 66, 27, 0, 834, 835, 1, 0, 0, 0, 835, 836, 6, 84, 12, 0, 836, 837, 6, 84, 13, 0, 837, 181, 1, 0, 0, 0, 838, 839, 3, 164, 76, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 85, 10, 0, 841, 183, 1, 0, 0, 0, 842, 843, 3, 166, 77, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 86, 14, 0, 845, 185, 1, 0, 0, 0, 846, 847, 3, 104, 46, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 87, 15, 0, 849, 187, 1, 0, 0, 0, 850, 851, 3, 100, 44, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 88, 16, 0, 853, 189, 1, 0, 0, 0, 854, 855, 3, 88, 38, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 89, 17, 0, 857, 191, 1, 0, 0, 0, 858, 859, 5, 111, 0, 0, 859, 860, 5, 112, 0, 0, 860, 861, 5, 116, 0, 0, 861, 862, 5, 105, 0, 0, 862, 863, 5, 111, 0, 0, 863, 864, 5, 110, 0, 0, 864, 865, 5, 115, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 5, 109, 0, 0, 867, 868, 5, 101, 0, 0, 868, 869, 5, 116, 0, 0, 869, 870, 5, 97, 0, 0, 870, 871, 5, 100, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 116, 0, 0, 873, 874, 5, 97, 0, 0, 874, 195, 1, 0, 0, 0, 875, 879, 8, 10, 0, 0, 876, 877, 5, 47, 0, 0, 877, 879, 8, 11, 0, 0, 878, 875, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 197, 1, 0, 0, 0, 880, 882, 3, 196, 92, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 199, 1, 0, 0, 0, 885, 886, 3, 50, 19, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 94, 9, 0, 888, 201, 1, 0, 0, 0, 889, 890, 3, 52, 20, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 95, 9, 0, 892, 203, 1, 0, 0, 0, 893, 894, 3, 54, 21, 0, 894, 895, 1, 0, 0, 0, 895, 896, 6, 96, 9, 0, 896, 205, 1, 0, 0, 0, 897, 898, 3, 66, 27, 0, 898, 899, 1, 0, 0, 0, 899, 900, 6, 97, 12, 0, 900, 901, 6, 97, 13, 0, 901, 207, 1, 0, 0, 0, 902, 903, 3, 108, 48, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 98, 18, 0, 905, 209, 1, 0, 0, 0, 906, 907, 3, 104, 46, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 99, 15, 0, 909, 211, 1, 0, 0, 0, 910, 915, 3, 70, 29, 0, 
911, 915, 3, 68, 28, 0, 912, 915, 3, 84, 36, 0, 913, 915, 3, 158, 73, 0, 914, 910, 1, 0, 0, 0, 914, 911, 1, 0, 0, 0, 914, 912, 1, 0, 0, 0, 914, 913, 1, 0, 0, 0, 915, 213, 1, 0, 0, 0, 916, 919, 3, 70, 29, 0, 917, 919, 3, 158, 73, 0, 918, 916, 1, 0, 0, 0, 918, 917, 1, 0, 0, 0, 919, 923, 1, 0, 0, 0, 920, 922, 3, 212, 100, 0, 921, 920, 1, 0, 0, 0, 922, 925, 1, 0, 0, 0, 923, 921, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 936, 1, 0, 0, 0, 925, 923, 1, 0, 0, 0, 926, 929, 3, 84, 36, 0, 927, 929, 3, 78, 33, 0, 928, 926, 1, 0, 0, 0, 928, 927, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 932, 3, 212, 100, 0, 931, 930, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 936, 1, 0, 0, 0, 935, 918, 1, 0, 0, 0, 935, 928, 1, 0, 0, 0, 936, 215, 1, 0, 0, 0, 937, 940, 3, 214, 101, 0, 938, 940, 3, 170, 79, 0, 939, 937, 1, 0, 0, 0, 939, 938, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 944, 3, 50, 19, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 103, 9, 0, 946, 219, 1, 0, 0, 0, 947, 948, 3, 52, 20, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 104, 9, 0, 950, 221, 1, 0, 0, 0, 951, 952, 3, 54, 21, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 105, 9, 0, 954, 223, 1, 0, 0, 0, 955, 956, 3, 66, 27, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 106, 12, 0, 958, 959, 6, 106, 13, 0, 959, 225, 1, 0, 0, 0, 960, 961, 3, 100, 44, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 107, 16, 0, 963, 227, 1, 0, 0, 0, 964, 965, 3, 104, 46, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 108, 15, 0, 967, 229, 1, 0, 0, 0, 968, 969, 3, 108, 48, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 109, 18, 0, 971, 231, 1, 0, 0, 0, 972, 973, 5, 97, 0, 0, 973, 974, 5, 115, 0, 0, 974, 233, 1, 0, 0, 0, 975, 976, 3, 216, 102, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 111, 19, 0, 978, 235, 1, 0, 0, 0, 979, 980, 3, 50, 19, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 112, 9, 0, 982, 237, 1, 0, 0, 0, 983, 984, 3, 52, 20, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 113, 9, 0, 986, 239, 1, 0, 0, 0, 987, 988, 3, 54, 21, 
0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 114, 9, 0, 990, 241, 1, 0, 0, 0, 991, 992, 3, 66, 27, 0, 992, 993, 1, 0, 0, 0, 993, 994, 6, 115, 12, 0, 994, 995, 6, 115, 13, 0, 995, 243, 1, 0, 0, 0, 996, 997, 3, 164, 76, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 116, 10, 0, 999, 1000, 6, 116, 20, 0, 1000, 245, 1, 0, 0, 0, 1001, 1002, 5, 111, 0, 0, 1002, 1003, 5, 110, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 21, 0, 1005, 247, 1, 0, 0, 0, 1006, 1007, 5, 119, 0, 0, 1007, 1008, 5, 105, 0, 0, 1008, 1009, 5, 116, 0, 0, 1009, 1010, 5, 104, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 118, 21, 0, 1012, 249, 1, 0, 0, 0, 1013, 1014, 8, 12, 0, 0, 1014, 251, 1, 0, 0, 0, 1015, 1017, 3, 250, 119, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 3, 320, 154, 0, 1021, 1023, 1, 0, 0, 0, 1022, 1016, 1, 0, 0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1025, 1, 0, 0, 0, 1024, 1026, 3, 250, 119, 0, 1025, 1024, 1, 0, 0, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 253, 1, 0, 0, 0, 1029, 1030, 3, 172, 80, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 121, 22, 0, 1032, 255, 1, 0, 0, 0, 1033, 1034, 3, 252, 120, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 122, 23, 0, 1036, 257, 1, 0, 0, 0, 1037, 1038, 3, 50, 19, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 123, 9, 0, 1040, 259, 1, 0, 0, 0, 1041, 1042, 3, 52, 20, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 124, 9, 0, 1044, 261, 1, 0, 0, 0, 1045, 1046, 3, 54, 21, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 125, 9, 0, 1048, 263, 1, 0, 0, 0, 1049, 1050, 3, 66, 27, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 126, 12, 0, 1052, 1053, 6, 126, 13, 0, 1053, 1054, 6, 126, 13, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 100, 44, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 16, 0, 1058, 267, 1, 0, 0, 0, 1059, 1060, 3, 104, 46, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 128, 15, 0, 1062, 269, 1, 0, 0, 0, 1063, 1064, 3, 108, 48, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 
6, 129, 18, 0, 1066, 271, 1, 0, 0, 0, 1067, 1068, 3, 248, 118, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 130, 24, 0, 1070, 273, 1, 0, 0, 0, 1071, 1072, 3, 216, 102, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 131, 19, 0, 1074, 275, 1, 0, 0, 0, 1075, 1076, 3, 172, 80, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 132, 22, 0, 1078, 277, 1, 0, 0, 0, 1079, 1080, 3, 50, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 133, 9, 0, 1082, 279, 1, 0, 0, 0, 1083, 1084, 3, 52, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 134, 9, 0, 1086, 281, 1, 0, 0, 0, 1087, 1088, 3, 54, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 135, 9, 0, 1090, 283, 1, 0, 0, 0, 1091, 1092, 3, 66, 27, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 136, 12, 0, 1094, 1095, 6, 136, 13, 0, 1095, 285, 1, 0, 0, 0, 1096, 1097, 3, 108, 48, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 137, 18, 0, 1099, 287, 1, 0, 0, 0, 1100, 1101, 3, 172, 80, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 138, 22, 0, 1103, 289, 1, 0, 0, 0, 1104, 1105, 3, 168, 78, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 139, 25, 0, 1107, 291, 1, 0, 0, 0, 1108, 1109, 3, 50, 19, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 140, 9, 0, 1111, 293, 1, 0, 0, 0, 1112, 1113, 3, 52, 20, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1115, 6, 141, 9, 0, 1115, 295, 1, 0, 0, 0, 1116, 1117, 3, 54, 21, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 142, 9, 0, 1119, 297, 1, 0, 0, 0, 1120, 1121, 3, 66, 27, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 143, 12, 0, 1123, 1124, 6, 143, 13, 0, 1124, 299, 1, 0, 0, 0, 1125, 1126, 5, 105, 0, 0, 1126, 1127, 5, 110, 0, 0, 1127, 1128, 5, 102, 0, 0, 1128, 1129, 5, 111, 0, 0, 1129, 301, 1, 0, 0, 0, 1130, 1131, 3, 50, 19, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 6, 145, 9, 0, 1133, 303, 1, 0, 0, 0, 1134, 1135, 3, 52, 20, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 6, 146, 9, 0, 1137, 305, 1, 0, 0, 0, 1138, 1139, 3, 54, 21, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 147, 9, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 66, 27, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 12, 0, 
1145, 1146, 6, 148, 13, 0, 1146, 309, 1, 0, 0, 0, 1147, 1148, 5, 102, 0, 0, 1148, 1149, 5, 117, 0, 0, 1149, 1150, 5, 110, 0, 0, 1150, 1151, 5, 99, 0, 0, 1151, 1152, 5, 116, 0, 0, 1152, 1153, 5, 105, 0, 0, 1153, 1154, 5, 111, 0, 0, 1154, 1155, 5, 110, 0, 0, 1155, 1156, 5, 115, 0, 0, 1156, 311, 1, 0, 0, 0, 1157, 1158, 3, 50, 19, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 150, 9, 0, 1160, 313, 1, 0, 0, 0, 1161, 1162, 3, 52, 20, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 151, 9, 0, 1164, 315, 1, 0, 0, 0, 1165, 1166, 3, 54, 21, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 152, 9, 0, 1168, 317, 1, 0, 0, 0, 1169, 1170, 3, 166, 77, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 153, 14, 0, 1172, 1173, 6, 153, 13, 0, 1173, 319, 1, 0, 0, 0, 1174, 1175, 5, 58, 0, 0, 1175, 321, 1, 0, 0, 0, 1176, 1182, 3, 78, 33, 0, 1177, 1182, 3, 68, 28, 0, 1178, 1182, 3, 108, 48, 0, 1179, 1182, 3, 70, 29, 0, 1180, 1182, 3, 84, 36, 0, 1181, 1176, 1, 0, 0, 0, 1181, 1177, 1, 0, 0, 0, 1181, 1178, 1, 0, 0, 0, 1181, 1179, 1, 0, 0, 0, 1181, 1180, 1, 0, 0, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 323, 1, 0, 0, 0, 1185, 1186, 3, 50, 19, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 156, 9, 0, 1188, 325, 1, 0, 0, 0, 1189, 1190, 3, 52, 20, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 157, 9, 0, 1192, 327, 1, 0, 0, 0, 1193, 1194, 3, 54, 21, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 158, 9, 0, 1196, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 797, 802, 807, 809, 815, 878, 883, 914, 918, 923, 928, 933, 935, 939, 941, 1018, 1022, 1027, 1181, 1183, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 68, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index ac3354d0aa907..75fa8061fa48b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -69,11 +69,11 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", - "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", - "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", - "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", + "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", + "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", @@ -191,7 +191,7 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - 
"\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000n\u04ad\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ @@ -234,720 +234,716 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ 
+ "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ - 
"\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ - "\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ - "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ - "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ - "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ - "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ - "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ - "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ - "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ - "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - 
"-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ - "4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ - "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ - "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ - "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ - "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ - "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ - 
"\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ - "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ - "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ - "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ - "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - 
"\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ - "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ - "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ - "\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ - ".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ - "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ - "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ - "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ - "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ - "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ - "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ - "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ - "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ - "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ - "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ - 
"\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ - "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ - "\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ - "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ - "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ - "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ - "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ - "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ - "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ - "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ - "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ - "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ - " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ - "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ - "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ - ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ - "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ - "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ - "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ - "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ - "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ - 
"\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ - "\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ - "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ - "\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ - "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ - "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ - "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ - "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ - "\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ - "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ - "\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ - "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ - "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ - "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ - "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ - "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ - "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ - "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ - "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ - "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ - "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ - "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ - "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ - 
"\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ - "\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ - "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ - "\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ - "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ - "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ - "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ - "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ - "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ - "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ - "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ - "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ - "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ - "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ - "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ - "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ - "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ - "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ - "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ - "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ - "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ - "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ - "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ - "\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ - 
"\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ - "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ - "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ - "\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ - "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ - "\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ - "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ - "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ - "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ - " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ - "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ - "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ - ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ - "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ - "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ - "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ - "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ - "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ - "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ - "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ - "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ - "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ - "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ - "\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ - "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ - 
"\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ - "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ - "\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ - "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ - "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ - "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ - "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ - "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ - "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ - "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ - "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ - "\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ - "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ - "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ - "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ - "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ - "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ - "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ - "\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ - "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - 
"\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ - "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ - "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ - "\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ - "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ - "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ - "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ - "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ - "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ - "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ - "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ - "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ - "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ - "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ - "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ - "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ - "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ - "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ - 
"\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ - "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ - "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - "\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ - "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ - "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ - "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ - "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ - "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ - "\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ - "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ - "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ - "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ - "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ - "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ - "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ - "\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ - "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ - "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ - "\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ - "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ - 
"\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ - "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ - "\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ - "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ - "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ - "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ - "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ - "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ - "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ - "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ - "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ - "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ - "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ - "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ - "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ - "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ - "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ - "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ - "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ - "\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ - "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ - 
"\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ - "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ - "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ - "\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ - "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ - "\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ - "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ - "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ - "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ - "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ - "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ - "\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ - "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ - "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ - "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ - "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ - "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ - "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ - "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ - 
"\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ - "\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ - "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ - "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ - "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ - "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ - "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ - "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ - "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ - "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ - "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ - 
"\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ - "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ - "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ - "\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ - "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ - "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ - "\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ - "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ - "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ - "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ - "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ - "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ - "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ - "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ - "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ - "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ - "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ - "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ - "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ - "\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ - "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ - 
"\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ - "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ - "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ - "\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ - "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ - "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ - "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ - "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ - "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ - "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ - "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ - "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ - "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ - "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ - "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ - "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ - "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - 
"\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ - "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ - "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ - "\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ - "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ - "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ - "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ - "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ - "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ - "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ - "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ - "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ - "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ - "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ - "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ - "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ - "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ - "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ - "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ - "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ - "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ - "\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ - 
"n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ - "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ - "=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ - "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ - "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ - "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ - "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ - "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ - "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ - "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ - "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ - "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ - "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ - "s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ - "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ - "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ - "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ - "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ - "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ - "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ - "}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ - "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ - 
"\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ - "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ - "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ - "\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ - "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ - "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ - "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ - "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ - "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ - "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ - "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ - "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ - "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ - "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ - "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ - "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ - "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ - "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ - "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ - "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ - "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ - "\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ - 
"\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ - "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ - "\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ - "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ - "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ - "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ - "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ - "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ - "V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ - "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ - "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ - "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ - "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ - "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ - "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ - "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ - "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ - 
"\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ - "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ - "\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ - "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ - "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ - "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ - "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ - "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ - "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ - "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ - "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ - "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ - "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ - "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ - "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ - "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ - "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ - "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ - "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ - "\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ - "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ - 
"\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ - "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ - "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ - "\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ - "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ - "\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ - "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ - "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ - "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ - "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ - "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ - "T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ - "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ - "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ - "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ - "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ - "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ - "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ - "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ - "\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ - "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ - 
"\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ - "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ - "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ - "\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ - "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ - "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ - "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ - "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ - "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ - "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ - "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ - "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ - "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ - "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ - "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ - "\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ - "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ - 
"\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - "\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ - "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ - "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ - "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ - "\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ - "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ - "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ - "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ - "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ - "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ - "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ - "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ - "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ - "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ - "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ - "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ - "\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ - 
"\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ - "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ - "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ - "\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ - "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ - "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ - "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ - "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ - "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ - "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ - "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ - "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ - "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ - "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ - "\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ - 
"\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ - "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ - "\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ - "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ - "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ - "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ - "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ - "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ - "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ - "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ - "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ - "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ - "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ - "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ - "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ - "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ - "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ - "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ - "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ - "\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ - "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ - 
"\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ - "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ - "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ - "f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ - "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ - "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ - "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ - "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ - "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ - "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ - "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ - "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ - "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ - "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ - "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ - "\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ - "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ - 
"\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ - "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ - "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ - "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ - "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ - "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ - "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ - "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ - "\u0000\u0007W\u0000\u0007C\u0000"; + "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ + "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ + "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ + "\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + 
"\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ + " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ + "#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ + "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ + "\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ + "&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ + "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ + "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ + "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ + "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ + "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ + "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ + "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00012\u0001"+ + "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001<\u0001<\u0001=\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ + "C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001"+ + "G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001"+ + "L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001"+ + "N\u0001N\u0005N\u031c\bN\nN\fN\u031f\tN\u0001N\u0001N\u0003N\u0323\bN"+ + 
"\u0001N\u0004N\u0326\bN\u000bN\fN\u0327\u0003N\u032a\bN\u0001O\u0001O"+ + "\u0004O\u032e\bO\u000bO\fO\u032f\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+ + "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001"+ + "S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001"+ + "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003\\\u036f\b\\\u0001]\u0004"+ + "]\u0372\b]\u000b]\f]\u0373\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0003d\u0393\bd\u0001e\u0001e\u0003e\u0397\be\u0001e\u0005"+ + "e\u039a\be\ne\fe\u039d\te\u0001e\u0001e\u0003e\u03a1\be\u0001e\u0004e"+ + "\u03a4\be\u000be\fe\u03a5\u0003e\u03a8\be\u0001f\u0001f\u0004f\u03ac\b"+ + "f\u000bf\ff\u03ad\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ + "h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03f9\bx\u000bx\fx\u03fa\u0001"+ + "x\u0001x\u0003x\u03ff\bx\u0001x\u0004x\u0402\bx\u000bx\fx\u0403\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ + "~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + 
"\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ + "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ + "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ + "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ + "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e"+ + "\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049e\b\u009b\u000b\u009b"+ + "\f\u009b\u049f\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002\u0010\u0003"+ + "\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e"+ + "\n 
\u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u0015"+ + "6\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000"+ + "J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d"+ + "^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u0086"+ + "2\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a"+ + "<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aa\u0000\u00acD"+ + "\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000"+ + "\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4\u0000\u00c6J\u00c8K\u00ca"+ + "L\u00ccM\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8"+ + "N\u00daO\u00dcP\u00deQ\u00e0\u0000\u00e2\u0000\u00e4\u0000\u00e6\u0000"+ + "\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0U\u00f2\u0000\u00f4\u0000\u00f6"+ + "V\u00f8W\u00fa\u0000\u00fcX\u00fe\u0000\u0100\u0000\u0102Y\u0104Z\u0106"+ + "[\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112\u0000"+ + "\u0114\u0000\u0116\\\u0118]\u011a^\u011c\u0000\u011e\u0000\u0120\u0000"+ + "\u0122\u0000\u0124_\u0126`\u0128a\u012a\u0000\u012cb\u012ec\u0130d\u0132"+ + "e\u0134\u0000\u0136f\u0138g\u013ah\u013ci\u013e\u0000\u0140j\u0142k\u0144"+ + "l\u0146m\u0148n\f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t"+ + "\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ + "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ + "#,,//::<<>?\\\\||\u04c8\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001"+ + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + 
"\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000\u0001:"+ + "\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000"+ + "\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000"+ + "\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\"+ + "\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000"+ + "\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000"+ + "\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j"+ + "\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000"+ + "\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000"+ + "\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x"+ + "\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000"+ + "\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000"+ + "\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000"+ + "\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000"+ + "\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000"+ + "\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000"+ + "\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000"+ + "\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000"+ + "\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000"+ + "\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000"+ + 
"\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000"+ + "\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000"+ + "\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000"+ + "\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000"+ + "\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000"+ + "\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000"+ + "\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000"+ + "\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000"+ + "\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000"+ + "\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000"+ + "\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000"+ + "\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000"+ + "\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000"+ + "\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000"+ + "\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000"+ + "\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000"+ + "\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000"+ + "\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000"+ + "\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000"+ + "\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000"+ + "\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000"+ + "\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000"+ + "\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000"+ + "\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000"+ + "\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000"+ + 
"\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000"+ + "\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000"+ + "\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000"+ + "\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000"+ + "\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124"+ + "\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001"+ + "\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000"+ + "\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000"+ + "\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001\u0000\u0000\u0000"+ + "\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a"+ + "\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001"+ + "\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001"+ + "\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001"+ + "\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000\u0000\f\u014a\u0001\u0000"+ + "\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000\u0010\u015b\u0001\u0000"+ + "\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000\u0014\u016b\u0001\u0000"+ + "\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000\u0018\u017c\u0001\u0000"+ + "\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000\u001c\u0191\u0001\u0000"+ + "\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000 \u01a0\u0001\u0000\u0000"+ + "\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3\u0001\u0000\u0000\u0000"+ + "&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000\u0000\u0000*\u01c9"+ + "\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000.\u01d8\u0001\u0000"+ + "\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7\u0001\u0000\u0000\u0000"+ + "4\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000\u0000\u00008\u020e"+ + "\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000<\u0218\u0001\u0000"+ + 
"\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220\u0001\u0000\u0000\u0000"+ + "B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000\u0000\u0000F\u022a"+ + "\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000J\u022f\u0001\u0000"+ + "\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a\u0001\u0000\u0000\u0000"+ + "P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000\u0000\u0000T\u0243"+ + "\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u0267\u0001\u0000"+ + "\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298\u0001\u0000\u0000"+ + "\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000\u0000\u0000b"+ + "\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000f\u02a7\u0001"+ + "\u0000\u0000\u0000h\u02aa\u0001\u0000\u0000\u0000j\u02ac\u0001\u0000\u0000"+ + "\u0000l\u02b1\u0001\u0000\u0000\u0000n\u02b3\u0001\u0000\u0000\u0000p"+ + "\u02b9\u0001\u0000\u0000\u0000r\u02bf\u0001\u0000\u0000\u0000t\u02c4\u0001"+ + "\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9\u0001\u0000\u0000"+ + "\u0000z\u02cc\u0001\u0000\u0000\u0000|\u02d1\u0001\u0000\u0000\u0000~"+ + "\u02d5\u0001\u0000\u0000\u0000\u0080\u02da\u0001\u0000\u0000\u0000\u0082"+ + "\u02e0\u0001\u0000\u0000\u0000\u0084\u02e3\u0001\u0000\u0000\u0000\u0086"+ + "\u02e5\u0001\u0000\u0000\u0000\u0088\u02eb\u0001\u0000\u0000\u0000\u008a"+ + "\u02ed\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000\u0000\u008e"+ + "\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000\u0000\u0092"+ + "\u02fb\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000\u0000\u0096"+ + "\u0300\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000\u0000\u009a"+ + "\u0305\u0001\u0000\u0000\u0000\u009c\u0307\u0001\u0000\u0000\u0000\u009e"+ + "\u0309\u0001\u0000\u0000\u0000\u00a0\u030b\u0001\u0000\u0000\u0000\u00a2"+ + "\u030d\u0001\u0000\u0000\u0000\u00a4\u030f\u0001\u0000\u0000\u0000\u00a6"+ + "\u0314\u0001\u0000\u0000\u0000\u00a8\u0329\u0001\u0000\u0000\u0000\u00aa"+ + 
"\u032b\u0001\u0000\u0000\u0000\u00ac\u0333\u0001\u0000\u0000\u0000\u00ae"+ + "\u0335\u0001\u0000\u0000\u0000\u00b0\u0339\u0001\u0000\u0000\u0000\u00b2"+ + "\u033d\u0001\u0000\u0000\u0000\u00b4\u0341\u0001\u0000\u0000\u0000\u00b6"+ + "\u0346\u0001\u0000\u0000\u0000\u00b8\u034a\u0001\u0000\u0000\u0000\u00ba"+ + "\u034e\u0001\u0000\u0000\u0000\u00bc\u0352\u0001\u0000\u0000\u0000\u00be"+ + "\u0356\u0001\u0000\u0000\u0000\u00c0\u035a\u0001\u0000\u0000\u0000\u00c2"+ + "\u0362\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000\u0000\u00c6"+ + "\u0371\u0001\u0000\u0000\u0000\u00c8\u0375\u0001\u0000\u0000\u0000\u00ca"+ + "\u0379\u0001\u0000\u0000\u0000\u00cc\u037d\u0001\u0000\u0000\u0000\u00ce"+ + "\u0381\u0001\u0000\u0000\u0000\u00d0\u0386\u0001\u0000\u0000\u0000\u00d2"+ + "\u038a\u0001\u0000\u0000\u0000\u00d4\u0392\u0001\u0000\u0000\u0000\u00d6"+ + "\u03a7\u0001\u0000\u0000\u0000\u00d8\u03ab\u0001\u0000\u0000\u0000\u00da"+ + "\u03af\u0001\u0000\u0000\u0000\u00dc\u03b3\u0001\u0000\u0000\u0000\u00de"+ + "\u03b7\u0001\u0000\u0000\u0000\u00e0\u03bb\u0001\u0000\u0000\u0000\u00e2"+ + "\u03c0\u0001\u0000\u0000\u0000\u00e4\u03c4\u0001\u0000\u0000\u0000\u00e6"+ + "\u03c8\u0001\u0000\u0000\u0000\u00e8\u03cc\u0001\u0000\u0000\u0000\u00ea"+ + "\u03cf\u0001\u0000\u0000\u0000\u00ec\u03d3\u0001\u0000\u0000\u0000\u00ee"+ + "\u03d7\u0001\u0000\u0000\u0000\u00f0\u03db\u0001\u0000\u0000\u0000\u00f2"+ + "\u03df\u0001\u0000\u0000\u0000\u00f4\u03e4\u0001\u0000\u0000\u0000\u00f6"+ + "\u03e9\u0001\u0000\u0000\u0000\u00f8\u03ee\u0001\u0000\u0000\u0000\u00fa"+ + "\u03f5\u0001\u0000\u0000\u0000\u00fc\u03fe\u0001\u0000\u0000\u0000\u00fe"+ + "\u0405\u0001\u0000\u0000\u0000\u0100\u0409\u0001\u0000\u0000\u0000\u0102"+ + "\u040d\u0001\u0000\u0000\u0000\u0104\u0411\u0001\u0000\u0000\u0000\u0106"+ + "\u0415\u0001\u0000\u0000\u0000\u0108\u0419\u0001\u0000\u0000\u0000\u010a"+ + "\u041f\u0001\u0000\u0000\u0000\u010c\u0423\u0001\u0000\u0000\u0000\u010e"+ + 
"\u0427\u0001\u0000\u0000\u0000\u0110\u042b\u0001\u0000\u0000\u0000\u0112"+ + "\u042f\u0001\u0000\u0000\u0000\u0114\u0433\u0001\u0000\u0000\u0000\u0116"+ + "\u0437\u0001\u0000\u0000\u0000\u0118\u043b\u0001\u0000\u0000\u0000\u011a"+ + "\u043f\u0001\u0000\u0000\u0000\u011c\u0443\u0001\u0000\u0000\u0000\u011e"+ + "\u0448\u0001\u0000\u0000\u0000\u0120\u044c\u0001\u0000\u0000\u0000\u0122"+ + "\u0450\u0001\u0000\u0000\u0000\u0124\u0454\u0001\u0000\u0000\u0000\u0126"+ + "\u0458\u0001\u0000\u0000\u0000\u0128\u045c\u0001\u0000\u0000\u0000\u012a"+ + "\u0460\u0001\u0000\u0000\u0000\u012c\u0465\u0001\u0000\u0000\u0000\u012e"+ + "\u046a\u0001\u0000\u0000\u0000\u0130\u046e\u0001\u0000\u0000\u0000\u0132"+ + "\u0472\u0001\u0000\u0000\u0000\u0134\u0476\u0001\u0000\u0000\u0000\u0136"+ + "\u047b\u0001\u0000\u0000\u0000\u0138\u0485\u0001\u0000\u0000\u0000\u013a"+ + "\u0489\u0001\u0000\u0000\u0000\u013c\u048d\u0001\u0000\u0000\u0000\u013e"+ + "\u0491\u0001\u0000\u0000\u0000\u0140\u0496\u0001\u0000\u0000\u0000\u0142"+ + "\u049d\u0001\u0000\u0000\u0000\u0144\u04a1\u0001\u0000\u0000\u0000\u0146"+ + "\u04a5\u0001\u0000\u0000\u0000\u0148\u04a9\u0001\u0000\u0000\u0000\u014a"+ + "\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c\u014d\u0005"+ + "s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005e\u0000"+ + "\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000\u0000\u0151"+ + "\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000\u0000\u0153"+ + "\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000\u0155\u0156"+ + "\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158\u0005p"+ + "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006\u0001"+ + "\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005e\u0000"+ + "\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000\u0000\u015e"+ + "\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160\u0161\u0005"+ + 
"h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163\u0006\u0002"+ + "\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165\u0005e\u0000"+ + "\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a\u0000\u0000\u0167"+ + "\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d\u016e\u0005p"+ + "\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005a\u0000\u0000"+ + "\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000\u0000\u0172\u0173"+ + "\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003\u0000\u0174\u0015"+ + "\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000\u0176\u0177\u0005"+ + "r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179\u0005m\u0000"+ + "\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006\u0005\u0004"+ + "\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005g\u0000\u0000"+ + "\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000\u0000\u017f\u0180"+ + "\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181\u0182\u0006"+ + "\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000\u0183\u0184\u0005"+ + "i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186\u0005l\u0000"+ + "\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n\u0000\u0000\u0188"+ + "\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0005"+ + "t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d\u0005t\u0000"+ + "\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000"+ + "\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000\u0000\u0000"+ + "\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000\u0193\u0194"+ + "\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196\u0001\u0000"+ + "\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001\u0000\u0000"+ + 
"\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000\u0000\u019a"+ + "\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005"+ + "t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f\u0006\t"+ + "\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005m\u0000"+ + "\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000\u0000\u01a3"+ + "\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6"+ + "\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005"+ + "m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005_\u0000"+ + "\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000\u0000\u01ac"+ + "\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae\u01af\u0005"+ + "n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000"+ + "\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000\u0000\u0000"+ + "\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6"+ + "\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8\u0005m"+ + "\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000"+ + "\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000\u0000"+ + "\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be\u01bf"+ + "\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006"+ + "\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005s\u0000"+ + "\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000\u0000\u01c5"+ + "\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01c8"+ + "\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005"+ + "s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005r\u0000"+ + "\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000"+ + "\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000\u01d0"+ + 
"\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3\u0005"+ + "a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s\u0000"+ + "\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010\u0000"+ + "\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000\u01d9"+ + "\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc\u0005"+ + "r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000\u0000"+ + "\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000\u0000"+ + "\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000\u01e2"+ + "\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7\u01e8"+ + "\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000\u0000"+ + "\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000"+ + "\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000"+ + "\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000\u0000"+ + "\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000\u01f4"+ + "\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f73"+ + "\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ + "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ + "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ + 
"\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ + "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ + "\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0003\u00a4L\u0000\u020f\u0210\u0001\u0000\u0000"+ + "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ + "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ + "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ + "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ + "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ + "\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ + "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ + "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ + "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ + "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ + "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ + "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ + "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ + 
"\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ + "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ + "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ + "M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ + "\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ + "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ + "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ + "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ + "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ + "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ + "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ + "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ + "J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ + "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ + "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ + "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ + "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ + "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ + "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ + "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ + "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ + "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ + "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ + "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ + 
"\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ + "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ + "\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ + "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ + "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ + "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ + "\u0000\u0000\u0273\u0277\u0003l0\u0000\u0274\u0276\u0003D\u001c\u0000"+ + "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ + "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ + "\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ + "\u027a\u027c\u0003l0\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ + "\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ + "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ + "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ + "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ + "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ + "\u0000\u0000\u0000\u0285\u0289\u0003l0\u0000\u0286\u0288\u0003D\u001c"+ + "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ + "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ + "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ + "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ + 
"\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ + "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003l0\u0000\u0291\u0293\u0003"+ + "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ + "\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ + "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ + "\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ + "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ + "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ + "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ + "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ + "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ + "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ + "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ + "\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005:\u0000\u0000\u02a8"+ + "\u02a9\u0005:\u0000\u0000\u02a9g\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005"+ + ",\u0000\u0000\u02abi\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005d\u0000"+ + "\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae\u02af\u0005s\u0000\u0000\u02af"+ + "\u02b0\u0005c\u0000\u0000\u02b0k\u0001\u0000\u0000\u0000\u02b1\u02b2\u0005"+ + ".\u0000\u0000\u02b2m\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005f\u0000"+ + "\u0000\u02b4\u02b5\u0005a\u0000\u0000\u02b5\u02b6\u0005l\u0000\u0000\u02b6"+ + "\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005e\u0000\u0000\u02b8o\u0001"+ + "\u0000\u0000\u0000\u02b9\u02ba\u0005f\u0000\u0000\u02ba\u02bb\u0005i\u0000"+ + "\u0000\u02bb\u02bc\u0005r\u0000\u0000\u02bc\u02bd\u0005s\u0000\u0000\u02bd"+ + "\u02be\u0005t\u0000\u0000\u02beq\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005"+ + "l\u0000\u0000\u02c0\u02c1\u0005a\u0000\u0000\u02c1\u02c2\u0005s\u0000"+ + 
"\u0000\u02c2\u02c3\u0005t\u0000\u0000\u02c3s\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c5\u0005(\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ + "i\u0000\u0000\u02c7\u02c8\u0005n\u0000\u0000\u02c8w\u0001\u0000\u0000"+ + "\u0000\u02c9\u02ca\u0005i\u0000\u0000\u02ca\u02cb\u0005s\u0000\u0000\u02cb"+ + "y\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd\u02ce\u0005"+ + "i\u0000\u0000\u02ce\u02cf\u0005k\u0000\u0000\u02cf\u02d0\u0005e\u0000"+ + "\u0000\u02d0{\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005n\u0000\u0000\u02d2"+ + "\u02d3\u0005o\u0000\u0000\u02d3\u02d4\u0005t\u0000\u0000\u02d4}\u0001"+ + "\u0000\u0000\u0000\u02d5\u02d6\u0005n\u0000\u0000\u02d6\u02d7\u0005u\u0000"+ + "\u0000\u02d7\u02d8\u0005l\u0000\u0000\u02d8\u02d9\u0005l\u0000\u0000\u02d9"+ + "\u007f\u0001\u0000\u0000\u0000\u02da\u02db\u0005n\u0000\u0000\u02db\u02dc"+ + "\u0005u\u0000\u0000\u02dc\u02dd\u0005l\u0000\u0000\u02dd\u02de\u0005l"+ + "\u0000\u0000\u02de\u02df\u0005s\u0000\u0000\u02df\u0081\u0001\u0000\u0000"+ + "\u0000\u02e0\u02e1\u0005o\u0000\u0000\u02e1\u02e2\u0005r\u0000\u0000\u02e2"+ + "\u0083\u0001\u0000\u0000\u0000\u02e3\u02e4\u0005?\u0000\u0000\u02e4\u0085"+ + "\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005r\u0000\u0000\u02e6\u02e7\u0005"+ + "l\u0000\u0000\u02e7\u02e8\u0005i\u0000\u0000\u02e8\u02e9\u0005k\u0000"+ + "\u0000\u02e9\u02ea\u0005e\u0000\u0000\u02ea\u0087\u0001\u0000\u0000\u0000"+ + "\u02eb\u02ec\u0005)\u0000\u0000\u02ec\u0089\u0001\u0000\u0000\u0000\u02ed"+ + "\u02ee\u0005t\u0000\u0000\u02ee\u02ef\u0005r\u0000\u0000\u02ef\u02f0\u0005"+ + "u\u0000\u0000\u02f0\u02f1\u0005e\u0000\u0000\u02f1\u008b\u0001\u0000\u0000"+ + "\u0000\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4"+ + "\u008d\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000\u02f6\u02f7"+ + "\u0005~\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005"+ + "!\u0000\u0000\u02f9\u02fa\u0005=\u0000\u0000\u02fa\u0091\u0001\u0000\u0000"+ + 
"\u0000\u02fb\u02fc\u0005<\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000"+ + "\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u02ff\u0005=\u0000\u0000\u02ff\u0095"+ + "\u0001\u0000\u0000\u0000\u0300\u0301\u0005>\u0000\u0000\u0301\u0097\u0001"+ + "\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0304\u0005=\u0000"+ + "\u0000\u0304\u0099\u0001\u0000\u0000\u0000\u0305\u0306\u0005+\u0000\u0000"+ + "\u0306\u009b\u0001\u0000\u0000\u0000\u0307\u0308\u0005-\u0000\u0000\u0308"+ + "\u009d\u0001\u0000\u0000\u0000\u0309\u030a\u0005*\u0000\u0000\u030a\u009f"+ + "\u0001\u0000\u0000\u0000\u030b\u030c\u0005/\u0000\u0000\u030c\u00a1\u0001"+ + "\u0000\u0000\u0000\u030d\u030e\u0005%\u0000\u0000\u030e\u00a3\u0001\u0000"+ + "\u0000\u0000\u030f\u0310\u0005[\u0000\u0000\u0310\u0311\u0001\u0000\u0000"+ + "\u0000\u0311\u0312\u0006L\u0000\u0000\u0312\u0313\u0006L\u0000\u0000\u0313"+ + "\u00a5\u0001\u0000\u0000\u0000\u0314\u0315\u0005]\u0000\u0000\u0315\u0316"+ + "\u0001\u0000\u0000\u0000\u0316\u0317\u0006M\r\u0000\u0317\u0318\u0006"+ + "M\r\u0000\u0318\u00a7\u0001\u0000\u0000\u0000\u0319\u031d\u0003F\u001d"+ + "\u0000\u031a\u031c\u0003V%\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c"+ + "\u031f\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d"+ + "\u031e\u0001\u0000\u0000\u0000\u031e\u032a\u0001\u0000\u0000\u0000\u031f"+ + "\u031d\u0001\u0000\u0000\u0000\u0320\u0323\u0003T$\u0000\u0321\u0323\u0003"+ + "N!\u0000\u0322\u0320\u0001\u0000\u0000\u0000\u0322\u0321\u0001\u0000\u0000"+ + "\u0000\u0323\u0325\u0001\u0000\u0000\u0000\u0324\u0326\u0003V%\u0000\u0325"+ + "\u0324\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ + "\u0325\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328"+ + "\u032a\u0001\u0000\u0000\u0000\u0329\u0319\u0001\u0000\u0000\u0000\u0329"+ + "\u0322\u0001\u0000\u0000\u0000\u032a\u00a9\u0001\u0000\u0000\u0000\u032b"+ + "\u032d\u0003P\"\u0000\u032c\u032e\u0003R#\u0000\u032d\u032c\u0001\u0000"+ + 
"\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f\u032d\u0001\u0000"+ + "\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000"+ + "\u0000\u0000\u0331\u0332\u0003P\"\u0000\u0332\u00ab\u0001\u0000\u0000"+ + "\u0000\u0333\u0334\u0003\u00aaO\u0000\u0334\u00ad\u0001\u0000\u0000\u0000"+ + "\u0335\u0336\u00032\u0013\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ + "\u0338\u0006Q\t\u0000\u0338\u00af\u0001\u0000\u0000\u0000\u0339\u033a"+ + "\u00034\u0014\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006"+ + "R\t\u0000\u033c\u00b1\u0001\u0000\u0000\u0000\u033d\u033e\u00036\u0015"+ + "\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340\u0006S\t\u0000"+ + "\u0340\u00b3\u0001\u0000\u0000\u0000\u0341\u0342\u0003B\u001b\u0000\u0342"+ + "\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006T\f\u0000\u0344\u0345"+ + "\u0006T\r\u0000\u0345\u00b5\u0001\u0000\u0000\u0000\u0346\u0347\u0003"+ + "\u00a4L\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006U\n"+ + "\u0000\u0349\u00b7\u0001\u0000\u0000\u0000\u034a\u034b\u0003\u00a6M\u0000"+ + "\u034b\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006V\u000e\u0000\u034d"+ + "\u00b9\u0001\u0000\u0000\u0000\u034e\u034f\u0003h.\u0000\u034f\u0350\u0001"+ + "\u0000\u0000\u0000\u0350\u0351\u0006W\u000f\u0000\u0351\u00bb\u0001\u0000"+ + "\u0000\u0000\u0352\u0353\u0003d,\u0000\u0353\u0354\u0001\u0000\u0000\u0000"+ + "\u0354\u0355\u0006X\u0010\u0000\u0355\u00bd\u0001\u0000\u0000\u0000\u0356"+ + "\u0357\u0003X&\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358\u0359\u0006"+ + "Y\u0011\u0000\u0359\u00bf\u0001\u0000\u0000\u0000\u035a\u035b\u0005o\u0000"+ + "\u0000\u035b\u035c\u0005p\u0000\u0000\u035c\u035d\u0005t\u0000\u0000\u035d"+ + "\u035e\u0005i\u0000\u0000\u035e\u035f\u0005o\u0000\u0000\u035f\u0360\u0005"+ + "n\u0000\u0000\u0360\u0361\u0005s\u0000\u0000\u0361\u00c1\u0001\u0000\u0000"+ + "\u0000\u0362\u0363\u0005m\u0000\u0000\u0363\u0364\u0005e\u0000\u0000\u0364"+ + 
"\u0365\u0005t\u0000\u0000\u0365\u0366\u0005a\u0000\u0000\u0366\u0367\u0005"+ + "d\u0000\u0000\u0367\u0368\u0005a\u0000\u0000\u0368\u0369\u0005t\u0000"+ + "\u0000\u0369\u036a\u0005a\u0000\u0000\u036a\u00c3\u0001\u0000\u0000\u0000"+ + "\u036b\u036f\b\n\u0000\u0000\u036c\u036d\u0005/\u0000\u0000\u036d\u036f"+ + "\b\u000b\u0000\u0000\u036e\u036b\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ + "\u0000\u0000\u0000\u036f\u00c5\u0001\u0000\u0000\u0000\u0370\u0372\u0003"+ + "\u00c4\\\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373\u0374\u0001\u0000"+ + "\u0000\u0000\u0374\u00c7\u0001\u0000\u0000\u0000\u0375\u0376\u00032\u0013"+ + "\u0000\u0376\u0377\u0001\u0000\u0000\u0000\u0377\u0378\u0006^\t\u0000"+ + "\u0378\u00c9\u0001\u0000\u0000\u0000\u0379\u037a\u00034\u0014\u0000\u037a"+ + "\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006_\t\u0000\u037c\u00cb"+ + "\u0001\u0000\u0000\u0000\u037d\u037e\u00036\u0015\u0000\u037e\u037f\u0001"+ + "\u0000\u0000\u0000\u037f\u0380\u0006`\t\u0000\u0380\u00cd\u0001\u0000"+ + "\u0000\u0000\u0381\u0382\u0003B\u001b\u0000\u0382\u0383\u0001\u0000\u0000"+ + "\u0000\u0383\u0384\u0006a\f\u0000\u0384\u0385\u0006a\r\u0000\u0385\u00cf"+ + "\u0001\u0000\u0000\u0000\u0386\u0387\u0003l0\u0000\u0387\u0388\u0001\u0000"+ + "\u0000\u0000\u0388\u0389\u0006b\u0012\u0000\u0389\u00d1\u0001\u0000\u0000"+ + "\u0000\u038a\u038b\u0003h.\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c"+ + "\u038d\u0006c\u000f\u0000\u038d\u00d3\u0001\u0000\u0000\u0000\u038e\u0393"+ + "\u0003F\u001d\u0000\u038f\u0393\u0003D\u001c\u0000\u0390\u0393\u0003T"+ + "$\u0000\u0391\u0393\u0003\u009eI\u0000\u0392\u038e\u0001\u0000\u0000\u0000"+ + "\u0392\u038f\u0001\u0000\u0000\u0000\u0392\u0390\u0001\u0000\u0000\u0000"+ + "\u0392\u0391\u0001\u0000\u0000\u0000\u0393\u00d5\u0001\u0000\u0000\u0000"+ + "\u0394\u0397\u0003F\u001d\u0000\u0395\u0397\u0003\u009eI\u0000\u0396\u0394"+ + 
"\u0001\u0000\u0000\u0000\u0396\u0395\u0001\u0000\u0000\u0000\u0397\u039b"+ + "\u0001\u0000\u0000\u0000\u0398\u039a\u0003\u00d4d\u0000\u0399\u0398\u0001"+ + "\u0000\u0000\u0000\u039a\u039d\u0001\u0000\u0000\u0000\u039b\u0399\u0001"+ + "\u0000\u0000\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c\u03a8\u0001"+ + "\u0000\u0000\u0000\u039d\u039b\u0001\u0000\u0000\u0000\u039e\u03a1\u0003"+ + "T$\u0000\u039f\u03a1\u0003N!\u0000\u03a0\u039e\u0001\u0000\u0000\u0000"+ + "\u03a0\u039f\u0001\u0000\u0000\u0000\u03a1\u03a3\u0001\u0000\u0000\u0000"+ + "\u03a2\u03a4\u0003\u00d4d\u0000\u03a3\u03a2\u0001\u0000\u0000\u0000\u03a4"+ + "\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a3\u0001\u0000\u0000\u0000\u03a5"+ + "\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a8\u0001\u0000\u0000\u0000\u03a7"+ + "\u0396\u0001\u0000\u0000\u0000\u03a7\u03a0\u0001\u0000\u0000\u0000\u03a8"+ + "\u00d7\u0001\u0000\u0000\u0000\u03a9\u03ac\u0003\u00d6e\u0000\u03aa\u03ac"+ + "\u0003\u00aaO\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000\u03ab\u03aa\u0001"+ + "\u0000\u0000\u0000\u03ac\u03ad\u0001\u0000\u0000\u0000\u03ad\u03ab\u0001"+ + "\u0000\u0000\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae\u00d9\u0001"+ + "\u0000\u0000\u0000\u03af\u03b0\u00032\u0013\u0000\u03b0\u03b1\u0001\u0000"+ + "\u0000\u0000\u03b1\u03b2\u0006g\t\u0000\u03b2\u00db\u0001\u0000\u0000"+ + "\u0000\u03b3\u03b4\u00034\u0014\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000"+ + "\u03b5\u03b6\u0006h\t\u0000\u03b6\u00dd\u0001\u0000\u0000\u0000\u03b7"+ + "\u03b8\u00036\u0015\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba"+ + "\u0006i\t\u0000\u03ba\u00df\u0001\u0000\u0000\u0000\u03bb\u03bc\u0003"+ + "B\u001b\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006j\f"+ + "\u0000\u03be\u03bf\u0006j\r\u0000\u03bf\u00e1\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0003d,\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3"+ + "\u0006k\u0010\u0000\u03c3\u00e3\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003"+ + 
"h.\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006l\u000f"+ + "\u0000\u03c7\u00e5\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003l0\u0000\u03c9"+ + "\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006m\u0012\u0000\u03cb\u00e7"+ + "\u0001\u0000\u0000\u0000\u03cc\u03cd\u0005a\u0000\u0000\u03cd\u03ce\u0005"+ + "s\u0000\u0000\u03ce\u00e9\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u00d8"+ + "f\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006o\u0013\u0000"+ + "\u03d2\u00eb\u0001\u0000\u0000\u0000\u03d3\u03d4\u00032\u0013\u0000\u03d4"+ + "\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006p\t\u0000\u03d6\u00ed"+ + "\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0014\u0000\u03d8\u03d9\u0001"+ + "\u0000\u0000\u0000\u03d9\u03da\u0006q\t\u0000\u03da\u00ef\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u00036\u0015\u0000\u03dc\u03dd\u0001\u0000\u0000"+ + "\u0000\u03dd\u03de\u0006r\t\u0000\u03de\u00f1\u0001\u0000\u0000\u0000"+ + "\u03df\u03e0\u0003B\u001b\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1"+ + "\u03e2\u0006s\f\u0000\u03e2\u03e3\u0006s\r\u0000\u03e3\u00f3\u0001\u0000"+ + "\u0000\u0000\u03e4\u03e5\u0003\u00a4L\u0000\u03e5\u03e6\u0001\u0000\u0000"+ + "\u0000\u03e6\u03e7\u0006t\n\u0000\u03e7\u03e8\u0006t\u0014\u0000\u03e8"+ + "\u00f5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0005o\u0000\u0000\u03ea\u03eb"+ + "\u0005n\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006"+ + "u\u0015\u0000\u03ed\u00f7\u0001\u0000\u0000\u0000\u03ee\u03ef\u0005w\u0000"+ + "\u0000\u03ef\u03f0\u0005i\u0000\u0000\u03f0\u03f1\u0005t\u0000\u0000\u03f1"+ + "\u03f2\u0005h\u0000\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ + "\u0006v\u0015\u0000\u03f4\u00f9\u0001\u0000\u0000\u0000\u03f5\u03f6\b"+ + "\f\u0000\u0000\u03f6\u00fb\u0001\u0000\u0000\u0000\u03f7\u03f9\u0003\u00fa"+ + "w\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa\u0001\u0000\u0000"+ + "\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000"+ + 
"\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003\u0140\u009a"+ + "\u0000\u03fd\u03ff\u0001\u0000\u0000\u0000\u03fe\u03f8\u0001\u0000\u0000"+ + "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0401\u0001\u0000\u0000"+ + "\u0000\u0400\u0402\u0003\u00faw\u0000\u0401\u0400\u0001\u0000\u0000\u0000"+ + "\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0401\u0001\u0000\u0000\u0000"+ + "\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u00fd\u0001\u0000\u0000\u0000"+ + "\u0405\u0406\u0003\u00acP\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ + "\u0408\u0006y\u0016\u0000\u0408\u00ff\u0001\u0000\u0000\u0000\u0409\u040a"+ + "\u0003\u00fcx\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ + "z\u0017\u0000\u040c\u0101\u0001\u0000\u0000\u0000\u040d\u040e\u00032\u0013"+ + "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006{\t\u0000"+ + "\u0410\u0103\u0001\u0000\u0000\u0000\u0411\u0412\u00034\u0014\u0000\u0412"+ + "\u0413\u0001\u0000\u0000\u0000\u0413\u0414\u0006|\t\u0000\u0414\u0105"+ + "\u0001\u0000\u0000\u0000\u0415\u0416\u00036\u0015\u0000\u0416\u0417\u0001"+ + "\u0000\u0000\u0000\u0417\u0418\u0006}\t\u0000\u0418\u0107\u0001\u0000"+ + "\u0000\u0000\u0419\u041a\u0003B\u001b\u0000\u041a\u041b\u0001\u0000\u0000"+ + "\u0000\u041b\u041c\u0006~\f\u0000\u041c\u041d\u0006~\r\u0000\u041d\u041e"+ + "\u0006~\r\u0000\u041e\u0109\u0001\u0000\u0000\u0000\u041f\u0420\u0003"+ + "d,\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422\u0006\u007f\u0010"+ + "\u0000\u0422\u010b\u0001\u0000\u0000\u0000\u0423\u0424\u0003h.\u0000\u0424"+ + "\u0425\u0001\u0000\u0000\u0000\u0425\u0426\u0006\u0080\u000f\u0000\u0426"+ + "\u010d\u0001\u0000\u0000\u0000\u0427\u0428\u0003l0\u0000\u0428\u0429\u0001"+ + "\u0000\u0000\u0000\u0429\u042a\u0006\u0081\u0012\u0000\u042a\u010f\u0001"+ + "\u0000\u0000\u0000\u042b\u042c\u0003\u00f8v\u0000\u042c\u042d\u0001\u0000"+ + "\u0000\u0000\u042d\u042e\u0006\u0082\u0018\u0000\u042e\u0111\u0001\u0000"+ + 
"\u0000\u0000\u042f\u0430\u0003\u00d8f\u0000\u0430\u0431\u0001\u0000\u0000"+ + "\u0000\u0431\u0432\u0006\u0083\u0013\u0000\u0432\u0113\u0001\u0000\u0000"+ + "\u0000\u0433\u0434\u0003\u00acP\u0000\u0434\u0435\u0001\u0000\u0000\u0000"+ + "\u0435\u0436\u0006\u0084\u0016\u0000\u0436\u0115\u0001\u0000\u0000\u0000"+ + "\u0437\u0438\u00032\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439"+ + "\u043a\u0006\u0085\t\u0000\u043a\u0117\u0001\u0000\u0000\u0000\u043b\u043c"+ + "\u00034\u0014\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006"+ + "\u0086\t\u0000\u043e\u0119\u0001\u0000\u0000\u0000\u043f\u0440\u00036"+ + "\u0015\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u0087"+ + "\t\u0000\u0442\u011b\u0001\u0000\u0000\u0000\u0443\u0444\u0003B\u001b"+ + "\u0000\u0444\u0445\u0001\u0000\u0000\u0000\u0445\u0446\u0006\u0088\f\u0000"+ + "\u0446\u0447\u0006\u0088\r\u0000\u0447\u011d\u0001\u0000\u0000\u0000\u0448"+ + "\u0449\u0003l0\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006"+ + "\u0089\u0012\u0000\u044b\u011f\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ + "\u00acP\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006\u008a"+ + "\u0016\u0000\u044f\u0121\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a8"+ + "N\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006\u008b\u0019"+ + "\u0000\u0453\u0123\u0001\u0000\u0000\u0000\u0454\u0455\u00032\u0013\u0000"+ + "\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006\u008c\t\u0000\u0457"+ + "\u0125\u0001\u0000\u0000\u0000\u0458\u0459\u00034\u0014\u0000\u0459\u045a"+ + "\u0001\u0000\u0000\u0000\u045a\u045b\u0006\u008d\t\u0000\u045b\u0127\u0001"+ + "\u0000\u0000\u0000\u045c\u045d\u00036\u0015\u0000\u045d\u045e\u0001\u0000"+ + "\u0000\u0000\u045e\u045f\u0006\u008e\t\u0000\u045f\u0129\u0001\u0000\u0000"+ + "\u0000\u0460\u0461\u0003B\u001b\u0000\u0461\u0462\u0001\u0000\u0000\u0000"+ + "\u0462\u0463\u0006\u008f\f\u0000\u0463\u0464\u0006\u008f\r\u0000\u0464"+ + 
"\u012b\u0001\u0000\u0000\u0000\u0465\u0466\u0005i\u0000\u0000\u0466\u0467"+ + "\u0005n\u0000\u0000\u0467\u0468\u0005f\u0000\u0000\u0468\u0469\u0005o"+ + "\u0000\u0000\u0469\u012d\u0001\u0000\u0000\u0000\u046a\u046b\u00032\u0013"+ + "\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0006\u0091\t\u0000"+ + "\u046d\u012f\u0001\u0000\u0000\u0000\u046e\u046f\u00034\u0014\u0000\u046f"+ + "\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0006\u0092\t\u0000\u0471\u0131"+ + "\u0001\u0000\u0000\u0000\u0472\u0473\u00036\u0015\u0000\u0473\u0474\u0001"+ + "\u0000\u0000\u0000\u0474\u0475\u0006\u0093\t\u0000\u0475\u0133\u0001\u0000"+ + "\u0000\u0000\u0476\u0477\u0003B\u001b\u0000\u0477\u0478\u0001\u0000\u0000"+ + "\u0000\u0478\u0479\u0006\u0094\f\u0000\u0479\u047a\u0006\u0094\r\u0000"+ + "\u047a\u0135\u0001\u0000\u0000\u0000\u047b\u047c\u0005f\u0000\u0000\u047c"+ + "\u047d\u0005u\u0000\u0000\u047d\u047e\u0005n\u0000\u0000\u047e\u047f\u0005"+ + "c\u0000\u0000\u047f\u0480\u0005t\u0000\u0000\u0480\u0481\u0005i\u0000"+ + "\u0000\u0481\u0482\u0005o\u0000\u0000\u0482\u0483\u0005n\u0000\u0000\u0483"+ + "\u0484\u0005s\u0000\u0000\u0484\u0137\u0001\u0000\u0000\u0000\u0485\u0486"+ + "\u00032\u0013\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006"+ + "\u0096\t\u0000\u0488\u0139\u0001\u0000\u0000\u0000\u0489\u048a\u00034"+ + "\u0014\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006\u0097"+ + "\t\u0000\u048c\u013b\u0001\u0000\u0000\u0000\u048d\u048e\u00036\u0015"+ + "\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0098\t\u0000"+ + "\u0490\u013d\u0001\u0000\u0000\u0000\u0491\u0492\u0003\u00a6M\u0000\u0492"+ + "\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0099\u000e\u0000\u0494"+ + "\u0495\u0006\u0099\r\u0000\u0495\u013f\u0001\u0000\u0000\u0000\u0496\u0497"+ + "\u0005:\u0000\u0000\u0497\u0141\u0001\u0000\u0000\u0000\u0498\u049e\u0003"+ + "N!\u0000\u0499\u049e\u0003D\u001c\u0000\u049a\u049e\u0003l0\u0000\u049b"+ + 
"\u049e\u0003F\u001d\u0000\u049c\u049e\u0003T$\u0000\u049d\u0498\u0001"+ + "\u0000\u0000\u0000\u049d\u0499\u0001\u0000\u0000\u0000\u049d\u049a\u0001"+ + "\u0000\u0000\u0000\u049d\u049b\u0001\u0000\u0000\u0000\u049d\u049c\u0001"+ + "\u0000\u0000\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u049d\u0001"+ + "\u0000\u0000\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u0143\u0001"+ + "\u0000\u0000\u0000\u04a1\u04a2\u00032\u0013\u0000\u04a2\u04a3\u0001\u0000"+ + "\u0000\u0000\u04a3\u04a4\u0006\u009c\t\u0000\u04a4\u0145\u0001\u0000\u0000"+ + "\u0000\u04a5\u04a6\u00034\u0014\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000"+ + "\u04a7\u04a8\u0006\u009d\t\u0000\u04a8\u0147\u0001\u0000\u0000\u0000\u04a9"+ + "\u04aa\u00036\u0015\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac"+ + "\u0006\u009e\t\u0000\u04ac\u0149\u0001\u0000\u0000\u0000:\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4"+ + "\u01fd\u01ff\u020a\u0233\u0238\u0241\u0248\u024d\u024f\u025a\u0262\u0265"+ + "\u0267\u026c\u0271\u0277\u027e\u0283\u0289\u028c\u0294\u0298\u031d\u0322"+ + "\u0327\u0329\u032f\u036e\u0373\u0392\u0396\u039b\u03a0\u03a5\u03a7\u03ab"+ + "\u03ad\u03fa\u03fe\u0403\u049d\u049f\u001a\u0005\u0002\u0000\u0005\u0004"+ + "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\n\u0000"+ + "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007B\u0000"+ + "\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007%\u0000\u0007N\u0000"+ + "\u0005\u000b\u0000\u0005\u0007\u0000\u0007D\u0000\u0007X\u0000\u0007W"+ + "\u0000\u0007C\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b4a8e60dd69aa..2b887065985d3 
100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -282,4 +282,4 @@ enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 
10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 
47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 68, 68, 74, 74, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 
26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 
180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 
255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 7, 2, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 
333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 3, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 
0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 4, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 5, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 6, 0, 0, 486, 85, 1, 0, 0, 
0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 7, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 
7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 
20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 7, 1, 0, 
60, 61, 1, 0, 62, 64, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 
0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 
0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 
5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 5, 74, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 
56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 2, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 3, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 4, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 
0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 5, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 6, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 
0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1f9c13c16cdd4..2f7f0468e455a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -2101,7 +2101,6 @@ public final FromCommandContext fromCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromIdentifierContext extends ParserRuleContext { public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") public 
FromIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2125,20 +2124,11 @@ public T accept(ParseTreeVisitor visitor) { public final FromIdentifierContext fromIdentifier() throws RecognitionException { FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); enterRule(_localctx, 32, RULE_fromIdentifier); - int _la; try { enterOuterAlt(_localctx, 1); { setState(296); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + match(FROM_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -4971,32 +4961,32 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ - "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ - "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ - "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ - "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ - "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ - "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ - "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ - "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ - "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ - "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ - 
"\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ - "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ - "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ - "\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ - "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ - ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ - "\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ - "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ - "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ - "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ - "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ - "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ - "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ - "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ - "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ + "j\u0000\u0007\u0001\u0000<=\u0001\u0000>@\u0001\u0000CD\u0002\u0000 "+ + "$$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000l\u0001\u0000"+ + "\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001\u0000\u0000"+ + "\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000\u0000\u0000"+ + "\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000\u0000\u000e"+ + "\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000\u0000\u0012"+ + "\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000\u0000\u0016"+ + "\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000\u0000\u001a"+ + "\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000\u0000\u001e"+ + "\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000\"\u012a"+ + 
"\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139\u0001\u0000"+ + "\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000\u0000\u0000"+ + ",\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u00000\u0153"+ + "\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161\u0001\u0000"+ + "\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000\u0000\u0000"+ + ":\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000>\u019c"+ + "\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad\u0001\u0000"+ + "\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000\u0000\u0000"+ + "H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000L\u01d2"+ + "\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9\u0001\u0000"+ + "\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000\u0000\u0000"+ + "V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000Z\u01f1"+ + "\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7\u0001"+ + "\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000\u0000"+ + "\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000h"+ + "\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ @@ -5105,42 +5095,42 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ - "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ - 
"\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ - "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ - "\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ - "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ - "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ - "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ - "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ - "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ - "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ - "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ - "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ - "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ - "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ - "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ - "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - 
"\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ - "\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ - "\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ - "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ - "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ + "\u0000\u0128\u0129\u0005J\u0000\u0000\u0129!\u0001\u0000\u0000\u0000\u012a"+ + "\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d\u0005"+ + "#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000\u0000"+ + "\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000"+ + "\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000\u0000"+ + "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000\u0134"+ + "\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001\u0000"+ + "\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000\u0000"+ + "\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000\u013c"+ + "\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140\u0003"+ + " \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001\u0000"+ + "\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ + "\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000"+ + "\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000\u0146"+ + "\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149\u0005"+ + 
"\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001\u0000"+ + "\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003\u001a"+ + "\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000\u0000"+ + "\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e\u0000"+ + "\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000\u0153"+ + "\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155\u0156"+ + "\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155\u0001"+ + "\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001\u0000"+ + "\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000\u0000"+ + "\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015d"+ + "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015e"+ + "\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160\u015e"+ + "\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163\u0005"+ + "%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000\u0000"+ + "\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000\u0000"+ + "\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0002\u0000\u0000"+ + "\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c9"+ + "\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ @@ -5172,10 +5162,10 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ 
"\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ + "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0003\u0000\u0000\u01a7"+ "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ + "\u0007\u0004\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ @@ -5204,7 +5194,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ + "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0005\u0000\u0000\u01e6U\u0001"+ "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ @@ -5214,7 +5204,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ + 
"\u0000\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8_\u0001\u0000\u0000"+ "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ "\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 67f8eb407ee11..7f0b5c73b9fb7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -25,7 +25,7 @@ public String visitIdentifier(IdentifierContext ctx) { @Override public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + return ctx == null ? null : unquoteIdentifier(null, ctx.FROM_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java new file mode 100644 index 0000000000000..8bcf5c472b2d0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * A {@link LocalSupplier} that contains already filled {@link Block}s. + */ +class ImmediateLocalSupplier implements LocalSupplier { + private final Block[] blocks; + + ImmediateLocalSupplier(Block[] blocks) { + this.blocks = blocks; + } + + @Override + public Block[] get() { + return blocks; + } + + @Override + public String toString() { + return Arrays.toString(blocks); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray((o, v) -> ((PlanStreamOutput) o).writeCachedBlock(v), blocks); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + ImmediateLocalSupplier other = (ImmediateLocalSupplier) obj; + return Arrays.equals(blocks, other.blocks); + } + + @Override + public int hashCode() { + return Arrays.hashCode(blocks); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java index 7fa82359ffc45..3b81da06d7077 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java @@ -7,13 +7,25 @@ package org.elasticsearch.xpack.esql.plan.logical.local; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import java.util.Arrays; 
+import java.io.IOException; import java.util.function.Supplier; -public interface LocalSupplier extends Supplier { +/** + * Supplies fixed {@link Block}s for things calculated at plan time. + *

+ * This is {@link Writeable} so we can model {@code LOOKUP} and + * hash joins which have to go over the wire. But many implementers + * don't have to go over the wire and they should feel free to throw + * {@link UnsupportedOperationException}. + *

+ */ +public interface LocalSupplier extends Supplier, Writeable { LocalSupplier EMPTY = new LocalSupplier() { @Override @@ -25,19 +37,29 @@ public Block[] get() { public String toString() { return "EMPTY"; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(0); + } + + @Override + public boolean equals(Object obj) { + return obj == EMPTY; + } + + @Override + public int hashCode() { + return 0; + } }; static LocalSupplier of(Block[] blocks) { - return new LocalSupplier() { - @Override - public Block[] get() { - return blocks; - } - - @Override - public String toString() { - return Arrays.toString(blocks); - } - }; + return new ImmediateLocalSupplier(blocks); + } + + static LocalSupplier readFrom(PlanStreamInput in) throws IOException { + Block[] blocks = in.readCachedBlockArray(); + return blocks.length == 0 ? EMPTY : of(blocks); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java index 2f5920a4e32c9..bedbd517f1184 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java @@ -93,7 +93,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(clusterAlias); out.writeString(sessionId); configuration.writeTo(out); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); out.writeStringArray(indices); out.writeStringArray(originalIndices); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7b38197dde95a..d9005d5997b34 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -205,6 +205,7 @@ public void execute( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) ) { // run compute on the coordinator + exchangeSource.addCompletionListener(refs.acquire()); runCompute( rootTask, new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), @@ -722,6 +723,7 @@ private void runComputeOnDataNode( var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); + exchangeSource.addCompletionListener(refs.acquire()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); runCompute( @@ -854,6 +856,7 @@ void runComputeOnRemoteCluster( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) ) { exchangeSink.addCompletionListener(refs.acquire()); + exchangeSource.addCompletionListener(refs.acquire()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index 6c87b226aa590..b72feadd20c61 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -90,7 +90,7 @@ public void writeTo(StreamOutput out) throws IOException { } 
out.writeCollection(shardIds); out.writeMap(aliasFilters); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index f6b534f7316df..4f852264193b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -136,6 +136,17 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); + + /** + * Support for timespan units abbreviations + */ + public static final NodeFeature TIMESPAN_ABBREVIATIONS = new NodeFeature("esql.timespan_abbreviations"); + @Override public Set getFeatures() { return Set.of( @@ -157,7 +168,8 @@ public Set getFeatures() { MV_ORDERING_SORTED_ASCENDING, METRICS_COUNTER_FIELDS, STRING_LITERAL_AUTO_CASTING_EXTENDED, - METADATA_FIELDS + METADATA_FIELDS, + TIMESPAN_ABBREVIATIONS ); } @@ -168,7 +180,8 @@ public Map getHistoricalFeatures() { Map.entry(MV_WARN, Version.V_8_12_0), Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0) + Map.entry(POW_DOUBLE, Version.V_8_12_0), + Map.entry(ENRICH_LOAD, Version.V_8_12_0) ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 
e4c7983d9a83a..e1360c67976ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -234,18 +234,20 @@ public static DataType commonType(DataType left, DataType right) { return DataTypeConverter.commonType(left, right); } + // generally supporting abbreviations from https://en.wikipedia.org/wiki/Unit_of_time public static TemporalAmount parseTemporalAmout(Number value, String qualifier, Source source) throws InvalidArgumentException, ArithmeticException, ParsingException { return switch (qualifier) { - case "millisecond", "milliseconds" -> Duration.ofMillis(safeToLong(value)); - case "second", "seconds" -> Duration.ofSeconds(safeToLong(value)); - case "minute", "minutes" -> Duration.ofMinutes(safeToLong(value)); - case "hour", "hours" -> Duration.ofHours(safeToLong(value)); - - case "day", "days" -> Period.ofDays(safeToInt(safeToLong(value))); - case "week", "weeks" -> Period.ofWeeks(safeToInt(safeToLong(value))); - case "month", "months" -> Period.ofMonths(safeToInt(safeToLong(value))); - case "year", "years" -> Period.ofYears(safeToInt(safeToLong(value))); + case "millisecond", "milliseconds", "ms" -> Duration.ofMillis(safeToLong(value)); + case "second", "seconds", "sec", "s" -> Duration.ofSeconds(safeToLong(value)); + case "minute", "minutes", "min" -> Duration.ofMinutes(safeToLong(value)); + case "hour", "hours", "h" -> Duration.ofHours(safeToLong(value)); + + case "day", "days", "d" -> Period.ofDays(safeToInt(safeToLong(value))); + case "week", "weeks", "w" -> Period.ofWeeks(safeToInt(safeToLong(value))); + case "month", "months", "mo" -> Period.ofMonths(safeToInt(safeToLong(value))); + case "quarter", "quarters", "q" -> Period.ofMonths(safeToInt(Math.multiplyExact(3L, safeToLong(value)))); + case "year", "years", "yr", "y" -> Period.ofYears(safeToInt(safeToLong(value))); default -> throw new 
ParsingException(source, "Unexpected time interval qualifier: '{}'", qualifier); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index c865b21723a9e..3539138e670eb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -224,6 +224,7 @@ public final void test() throws Throwable { * are tested in integration tests. */ assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredFeatures.contains(EsqlFeatures.ENRICH_LOAD.id())); doTest(); } catch (Throwable th) { throw reworkException(th); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 312250d2f58d0..185fb14503cab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -79,7 +79,7 @@ public static T serializeDeserialize(T orig, Serializer serializer, Deser public static T serializeDeserialize(T orig, Serializer serializer, Deserializer deserializer, EsqlConfiguration config) { try (BytesStreamOutput out = new BytesStreamOutput()) { - PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry); + PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry, config); serializer.write(planStreamOutput, orig); StreamInput in = new NamedWriteableAwareStreamInput( ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index c19d48f3cd50e..1fd7cfe368068 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -226,7 +226,7 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) { } protected final Page row(List values) { - return new Page(BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); + return new Page(1, BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } /** @@ -249,7 +249,8 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe } public final void testEvaluate() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); @@ -306,7 +307,13 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { *

*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -314,7 +321,13 @@ public final void testEvaluateBlockWithoutNulls() { * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -355,13 +368,14 @@ protected Matcher allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { - assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); - assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + assumeTrue("Must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); - Block[] manyPositionsBlocks = new Block[data.size()]; + Block[] manyPositionsBlocks = new 
Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; Set nullPositions = insertNulls ? IntStream.range(0, positions).filter(i -> randomBoolean()).mapToObj(Integer::valueOf).collect(Collectors.toSet()) : Set.of(); @@ -369,8 +383,12 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con nullPositions = Set.of(); } try { - for (int b = 0; b < data.size(); b++) { - ElementType elementType = PlannerUtils.toElementType(data.get(b).type()); + int b = 0; + for (TestCaseSupplier.TypedData d : data) { + if (d.isForceLiteral()) { + continue; + } + ElementType elementType = PlannerUtils.toElementType(d.type()); try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { @@ -381,9 +399,13 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } manyPositionsBlocks[b] = builder.build(); } + b++; } Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); - try (ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(manyPositionsBlocks))) { + try ( + ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, manyPositionsBlocks)) + ) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { assertThat(toJavaObject(block, p), allNullsMatcher()); @@ -408,8 +430,8 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con // TODO cranky time public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); List simpleData = testCase.getDataValues(); try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); @@ -450,8 +472,9 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); int 
count = 10_000; int threads = 5; var evalSupplier = evaluator(buildFieldExpression(testCase)); @@ -481,8 +504,8 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); @@ -490,8 +513,8 @@ public final void testEvaluatorToString() { } public final void testFactoryToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), testCase.evaluatorToString()); } @@ -524,7 +547,6 @@ public final void testFold() { } public void testSerializationOfSimple() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); assertSerialization(buildFieldExpression(testCase)); } @@ -594,7 +616,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false && nullValueDataType == DataTypes.NULL && original.getData().size() == 1 ? 
DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); } @@ -603,7 +625,7 @@ public interface ExpectedType { } public interface ExpectedEvaluatorToString { - Matcher evaluatorToString(int nullPosition, Matcher original); + Matcher evaluatorToString(int nullPosition, TestCaseSupplier.TypedData nullData, Matcher original); } protected static List anyNullIsNull( @@ -635,10 +657,11 @@ protected static List anyNullIsNull( TestCaseSupplier.TypedData od = oc.getData().get(i); return i == finalNullPosition ? od.forceValueToNull() : od; }).toList(); + TestCaseSupplier.TypedData nulledData = oc.getData().get(finalNullPosition); return new TestCaseSupplier.TestCase( data, - evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), - expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), + evaluatorToString.evaluatorToString(finalNullPosition, nulledData, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, nulledData.type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -1532,17 +1555,18 @@ private static void writeToTempDir(String subdir, String str, String extension) private final List breakers = Collections.synchronizedList(new ArrayList<>()); protected final DriverContext driverContext() { - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } protected final DriverContext crankyContext() { - BigArrays bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()) + .withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } @After diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 7cfe950bb3144..d9261a1658969 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -92,7 +92,7 @@ public static List stringCases( expected, lhsSuppliers, rhsSuppliers, - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), (lhs, rhs) -> warnings, suppliers, expectedType, @@ -202,16 +202,18 @@ public static List forBinaryCastingToDouble( (l, r) -> expected.apply(((Number) l).doubleValue(), ((Number) r).doubleValue()), lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name - + "[" - + lhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=0]", lhsType) - + ", " - + rhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=1]", rhsType) - + "]", + (lhsType, rhsType) -> equalTo( + name + + "[" + + lhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=0]", lhsType) + + ", " + + rhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=1]", rhsType) + + "]" + ), (lhs, rhs) -> warnings, suppliers, DataTypes.DOUBLE, @@ -224,7 +226,7 @@ public static void casesCrossProduct( BinaryOperator expected, List 
lhsSuppliers, List rhsSuppliers, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, BiFunction> warnings, List suppliers, DataType expectedType, @@ -243,7 +245,7 @@ public static void casesCrossProduct( public static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue ) { @@ -253,7 +255,7 @@ public static TestCaseSupplier testCaseSupplier( private static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue, BiFunction> warnings @@ -366,7 +368,7 @@ public static List forBinaryComparisonWithWidening( (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), warnings, suppliers, DataTypes.BOOLEAN, @@ -391,16 +393,18 @@ public static List forBinaryWithWidening( for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + lhsName - + "=" - + getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + rhsName - + "=" - + getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); 
casesCrossProduct( (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), @@ -429,26 +433,22 @@ public static List forBinaryNotCasting( boolean symmetric ) { return forBinaryNotCasting( - name, - lhsName, - rhsName, expected, expectedType, lhsSuppliers, rhsSuppliers, + equalTo(name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]"), (lhs, rhs) -> warnings, symmetric ); } public static List forBinaryNotCasting( - String name, - String lhsName, - String rhsName, BinaryOperator expected, DataType expectedType, List lhsSuppliers, List rhsSuppliers, + Matcher evaluatorToString, BiFunction> warnings, boolean symmetric ) { @@ -457,7 +457,7 @@ public static List forBinaryNotCasting( expected, lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]", + (lhsType, rhsType) -> evaluatorToString, warnings, suppliers, expectedType, @@ -1006,7 +1006,7 @@ public static List dateCases() { public static List datePeriodCases() { return List.of( - new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD), + new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD, true), new TypedDataSupplier( "", () -> Period.of( @@ -1014,18 +1014,20 @@ public static List datePeriodCases() { ESTestCase.randomIntBetween(-13, 13), ESTestCase.randomIntBetween(-32, 32) ), - EsqlDataTypes.DATE_PERIOD + EsqlDataTypes.DATE_PERIOD, + true ) ); } public static List timeDurationCases() { return List.of( - new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION), + new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION, true), new TypedDataSupplier( "", () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days - EsqlDataTypes.TIME_DURATION + EsqlDataTypes.TIME_DURATION, + true ) ); } @@ 
-1237,7 +1239,7 @@ public static class TestCase { private final String[] expectedWarnings; private final String expectedTypeError; - private final boolean allTypesAreRepresentable; + private final boolean canBuildEvaluator; private final Class foldingExceptionClass; private final String foldingExceptionMessage; @@ -1271,7 +1273,7 @@ public static TestCase typeError(List data, String expectedTypeError) this.matcher = matcher; this.expectedWarnings = expectedWarnings; this.expectedTypeError = expectedTypeError; - this.allTypesAreRepresentable = data.stream().allMatch(d -> EsqlDataTypes.isRepresentable(d.type)); + this.canBuildEvaluator = data.stream().allMatch(d -> d.forceLiteral || EsqlDataTypes.isRepresentable(d.type)); this.foldingExceptionClass = foldingExceptionClass; this.foldingExceptionMessage = foldingExceptionMessage; } @@ -1297,11 +1299,11 @@ public List getDataAsLiterals() { } public List getDataValues() { - return data.stream().map(t -> t.data()).collect(Collectors.toList()); + return data.stream().filter(d -> d.forceLiteral == false).map(TypedData::data).collect(Collectors.toList()); } - public boolean allTypesAreRepresentable() { - return allTypesAreRepresentable; + public boolean canBuildEvaluator() { + return canBuildEvaluator; } public Matcher getMatcher() { @@ -1428,6 +1430,13 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Has this been forced to a {@link Literal}. + */ + public boolean isForceLiteral() { + return forceLiteral; + } + /** * Return a {@link TypedData} that always returns {@code null} for it's * value without modifying anything else in the supplier. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index cc2714dc31dca..a73b4a0dfa557 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -46,14 +46,16 @@ public static Iterable parameters() { "fixed date with period", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00.00Z"), EsqlDataTypes.DATE_PERIOD, - Period.ofYears(1) + Period.ofYears(1), + "[YEAR_OF_CENTURY in Z][fixed to midnight]" ); dateCasesWithSpan( suppliers, "fixed date with duration", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), EsqlDataTypes.TIME_DURATION, - Duration.ofDays(1L) + Duration.ofDays(1L), + "[86400000 in Z][fixed]" ); numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); numberCasesWithSpan(suppliers, "fixed long with span", DataTypes.LONG, () -> 100L); @@ -68,7 +70,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + (nullPosition, nullData, original) -> nullPosition == 0 ? 
original : equalTo("LiteralsEvaluator[lit=null]") ) ); } @@ -112,7 +114,8 @@ private static void dateCasesWithSpan( String name, LongSupplier date, DataType spanType, - Object span + Object span, + String spanStr ) { suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, spanType), () -> { List args = new ArrayList<>(); @@ -120,7 +123,7 @@ private static void dateCasesWithSpan( args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); return new TestCaseSupplier.TestCase( args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", DataTypes.DATETIME, dateResultsMatcher(args) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 30460828aaa91..097f3c1038cfb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -64,7 +64,7 @@ public static Iterable parameters() { suppliers = anyNullIsNull( suppliers, (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? 
nullValueDataType : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); suppliers.add(new TestCaseSupplier("two doubles", List.of(DataTypes.DOUBLE, DataTypes.INTEGER), () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index 260813bacb8f2..bf16344847bde 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -48,7 +48,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java index e905f85141f31..64e03dec6b064 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; +import org.hamcrest.Matcher; import java.io.IOException; import java.lang.reflect.Field; @@ -28,6 +29,7 @@ import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; +import static org.hamcrest.Matchers.equalTo; public abstract class SpatialRelatesFunctionTestCase extends AbstractFunctionTestCase { @@ -188,11 +190,11 @@ private static DataType pickSpatialType(DataType leftType, DataType rightType) { } } - private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + private static Matcher spatialEvaluatorString(DataType leftType, DataType rightType) { String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; - return getFunctionClassName() - + crsType - + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + return equalTo( + getFunctionClassName() + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]" + ); } private static int countGeo(DataType... 
types) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 2daf2688d6631..25ccd91f43d07 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -33,8 +33,10 @@ import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class AddTests extends AbstractFunctionTestCase { public AddTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -94,27 +96,23 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Period) lhs).plus((Period) rhs), EsqlDataTypes.DATE_PERIOD, TestCaseSupplier.datePeriodCases(), TestCaseSupplier.datePeriodCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Duration) lhs).plus((Duration) rhs), EsqlDataTypes.TIME_DURATION, TestCaseSupplier.timeDurationCases(), TestCaseSupplier.timeDurationCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs 
have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); @@ -139,28 +137,22 @@ public static Iterable parameters() { }; suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Period, so it should be tested. Similarly below. - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.datePeriodCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) ); suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Duration, so it should be tested. Similarly above. - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.timeDurationCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) @@ -192,7 +184,12 @@ public static Iterable parameters() { // Datetime tests are split in two, depending on their permissiveness of null-injection, which cannot happen "automatically" for // Datetime + Period/Duration, since the expression will take the non-null arg's type. - suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AddTests::addErrorMessageString); + suppliers = anyNullIsNull( + suppliers, + (nullPosition, nullType, original) -> original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
equalTo("LiteralsEvaluator[lit=null]") : original + ); + suppliers = errorsForCasesWithoutExamples(suppliers, AddTests::addErrorMessageString); // Cases that should generate warnings suppliers.addAll(List.of(new TestCaseSupplier("MV", () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index f3348ab2dcba5..eb29a7b5ce06e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class DivTests extends AbstractFunctionTestCase { public DivTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -116,16 +119,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + 
TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index a70f2c7885257..bc6d6dd97c3ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class ModTests extends AbstractFunctionTestCase { public ModTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -103,16 +106,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + 
TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index f5e5e9f406f22..c65f4eed2de70 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -155,7 +155,7 @@ public void testEdgeCases() { } private Object process(Object val) { - if (testCase.allTypesAreRepresentable()) { + if (testCase.canBuildEvaluator()) { Neg neg = new Neg(Source.EMPTY, field("val", typeOf(val))); try (Block block = evaluator(neg).get(driverContext()).eval(row(List.of(val)))) { return toJavaObject(block, 0); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 23f9e93ac72a6..cfa3b4a8ea6ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -207,7 +207,7 @@ public void 
testWrappedStreamSimple() throws IOException { // write BytesStreamOutput bso = new BytesStreamOutput(); bso.writeString("hello"); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); var plan = new RowExec(Source.EMPTY, List.of(new Alias(Source.EMPTY, "foo", field("field", DataTypes.LONG)))); out.writePhysicalPlanNode(plan); bso.writeVInt(11_345); @@ -230,7 +230,7 @@ public void testUnsupportedAttributeSimple() throws IOException { new NameId() ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeUnsupportedAttr(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readUnsupportedAttr(in); @@ -255,7 +255,7 @@ public void testFieldAttributeSimple() throws IOException { true // synthetic ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldAttribute(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readFieldAttribute(in); @@ -277,7 +277,7 @@ public void testKeywordEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeKeywordEsField(out, orig); var deser = PlanNamedTypes.readKeywordEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -295,7 +295,7 @@ public void testTextdEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + 
PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeTextEsField(out, orig); var deser = PlanNamedTypes.readTextEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -308,7 +308,7 @@ public void testTextEsField() { public void testInvalidMappedFieldSimple() throws IOException { var orig = new InvalidMappedField("foo", "bar"); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeInvalidMappedField(out, orig); var deser = PlanNamedTypes.readInvalidMappedField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -321,7 +321,7 @@ public void testInvalidMappedField() { public void testEsDateFieldSimple() throws IOException { var orig = DateEsField.dateEsField("birth_date", Map.of(), false); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDateEsField(out, orig); var deser = PlanNamedTypes.readDateEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -330,7 +330,7 @@ public void testEsDateFieldSimple() throws IOException { public void testBinComparisonSimple() throws IOException { var orig = new Equals(Source.EMPTY, field("foo", DataTypes.DOUBLE), field("bar", DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(EsqlBinaryComparison.class, orig); var deser = (Equals) planStreamInput(bso).readNamed(EsqlBinaryComparison.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> 
deser); @@ -345,7 +345,7 @@ public void testBinComparison() { public void testAggFunctionSimple() throws IOException { var orig = new Avg(Source.EMPTY, field("foo_val", DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(AggregateFunction.class, orig); var deser = (Avg) planStreamInput(bso).readNamed(AggregateFunction.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -358,7 +358,7 @@ public void testAggFunction() { public void testArithmeticOperationSimple() throws IOException { var orig = new Add(Source.EMPTY, field("foo", DataTypes.LONG), field("bar", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(ArithmeticOperation.class, orig); var deser = (Add) planStreamInput(bso).readNamed(ArithmeticOperation.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -373,7 +373,7 @@ public void testArithmeticOperation() { public void testSubStringSimple() throws IOException { var orig = new Substring(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, 1, DataTypes.INTEGER), null); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeSubstring(out, orig); var deser = PlanNamedTypes.readSubstring(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -382,7 +382,7 @@ public void testSubStringSimple() throws IOException { public void testStartsWithSimple() throws IOException { var orig = new StartsWith(Source.EMPTY, field("foo", DataTypes.KEYWORD), 
new Literal(Source.EMPTY, "fo", DataTypes.KEYWORD)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeStartsWith(out, orig); var deser = PlanNamedTypes.readStartsWith(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -391,7 +391,7 @@ public void testStartsWithSimple() throws IOException { public void testRoundSimple() throws IOException { var orig = new Round(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeRound(out, orig); var deser = PlanNamedTypes.readRound(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -400,7 +400,7 @@ public void testRoundSimple() throws IOException { public void testPowSimple() throws IOException { var orig = new Pow(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writePow(out, orig); var deser = PlanNamedTypes.readPow(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -409,7 +409,7 @@ public void testPowSimple() throws IOException { public void testAliasSimple() throws IOException { var orig = new Alias(Source.EMPTY, "alias_name", field("a", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new 
PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeAlias(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readAlias(in); @@ -420,7 +420,7 @@ public void testAliasSimple() throws IOException { public void testLiteralSimple() throws IOException { var orig = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeLiteral(out, orig); var deser = PlanNamedTypes.readLiteral(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -429,7 +429,7 @@ public void testLiteralSimple() throws IOException { public void testOrderSimple() throws IOException { var orig = new Order(Source.EMPTY, field("val", DataTypes.INTEGER), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeOrder(out, orig); var deser = (Order) PlanNamedTypes.readOrder(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -438,7 +438,7 @@ public void testOrderSimple() throws IOException { public void testFieldSortSimple() throws IOException { var orig = new EsQueryExec.FieldSort(field("val", DataTypes.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldSort(out, orig); var deser = PlanNamedTypes.readFieldSort(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -447,7 +447,7 @@ public void 
testFieldSortSimple() throws IOException { public void testEsIndexSimple() throws IOException { var orig = new EsIndex("test*", Map.of("first_name", new KeywordEsField("first_name")), Set.of("test1", "test2")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsIndex(out, orig); var deser = PlanNamedTypes.readEsIndex(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -457,7 +457,7 @@ public void testDissectParserSimple() throws IOException { String pattern = "%{b} %{c}"; var orig = new Dissect.Parser(pattern, ",", new DissectParser(pattern, ",")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDissectParser(out, orig); var deser = PlanNamedTypes.readDissectParser(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -466,7 +466,7 @@ public void testDissectParserSimple() throws IOException { public void testEsRelation() throws IOException { var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomEsSourceOptions(), randomBoolean()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsRelation(out, orig); var deser = PlanNamedTypes.readEsRelation(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -479,7 +479,7 @@ public void testEsqlProject() throws IOException { List.of(randomFieldAttribute()) ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, 
planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsqlProject(out, orig); var deser = PlanNamedTypes.readEsqlProject(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -495,7 +495,7 @@ public void testMvExpand() throws IOException { ); var orig = new MvExpand(Source.EMPTY, esRelation, randomFieldAttribute(), randomFieldAttribute()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeMvExpand(out, orig); var deser = PlanNamedTypes.readMvExpand(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 7f683e8f8003b..bc69b4454df81 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -8,23 +8,130 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xpack.esql.Column; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import 
org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.io.IOException; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { - public void testTransportVersion() { + public void testTransportVersion() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); TransportVersion v1 = TransportVersionUtils.randomCompatibleVersion(random()); out.setTransportVersion(v1); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput( + out, + PlanNameRegistry.INSTANCE, + randomBoolean() ? null : EsqlConfigurationSerializationTests.randomConfiguration() + ); assertThat(planOut.getTransportVersion(), equalTo(v1)); TransportVersion v2 = TransportVersionUtils.randomCompatibleVersion(random()); planOut.setTransportVersion(v2); assertThat(planOut.getTransportVersion(), equalTo(v2)); assertThat(out.getTransportVersion(), equalTo(v2)); } + + public void testWriteBlockFromConfig() throws IOException { + String tableName = randomAlphaOfLength(5); + String columnName = randomAlphaOfLength(10); + try (Column c = randomColumn()) { + EsqlConfiguration configuration = randomConfiguration(Map.of(tableName, Map.of(columnName, c))); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(c.values()); + assertThat(out.bytes().length(), equalTo(3 + tableName.length() + columnName.length())); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + 
assertThat(in.readCachedBlock(), sameInstance(c.values())); + } + } + } + } + + public void testWriteBlockOnce() throws IOException { + try (Block b = randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + } + } + } + } + + public void testWriteBlockTwice() throws IOException { + try (Block b = randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + assertThat(in.readCachedBlock(), sameInstance(read)); + } + } + } + } + + private EsqlConfiguration randomConfiguration(Map> tables) { + return EsqlConfigurationSerializationTests.randomConfiguration("query_" + randomAlphaOfLength(1), tables); + } + + private static final int LEN = 10000; + + private Column randomColumn() { + try (IntBlock.Builder ints = 
BLOCK_FACTORY.newIntBlockBuilder(LEN)) { + for (int i = 0; i < LEN; i++) { + ints.appendInt(randomInt()); + } + return new Column(DataTypes.INTEGER, ints.build()); + } + } + + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(Block.getNamedWriteables()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index a0f226946cc36..9157f186ade92 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -380,14 +380,18 @@ public void testDurationLiterals() { assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + "second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " seconds")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " sec")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " s")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + "minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " minutes")); + assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " min")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + "hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + 
" hours")); + assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " h")); assertEquals(l(Duration.ofHours(-value), TIME_DURATION), whereExpression("-" + value + " hours")); } @@ -395,22 +399,33 @@ public void testDurationLiterals() { public void testDatePeriodLiterals() { int value = randomInt(Integer.MAX_VALUE); int weeksValue = randomInt(Integer.MAX_VALUE / 7); + int quartersValue = randomInt(Integer.MAX_VALUE / 3); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + "day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " days")); + assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " d")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + "week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " weeks")); + assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " w")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + "month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " months")); + assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " mo")); + + assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarters")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " q")); assertEquals(l(Period.ZERO, DATE_PERIOD), 
whereExpression("0year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + "year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " years")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " yr")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " y")); assertEquals(l(Period.ofYears(-value), DATE_PERIOD), whereExpression("-" + value + " years")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cf0dfa372ea3f..1a36616cb647b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -338,17 +338,17 @@ public void testInlineStatsWithoutGroups() { } public void testIdentifiersAsIndexPattern() { - assertIdentifierAsIndexPattern("foo", "from `foo`"); - assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); + // assertIdentifierAsIndexPattern("foo", "from `foo`"); + // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); + // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, 
test123"); assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, test,xyz"); assertIdentifierAsIndexPattern( - ",", - "from , ``" + "", // , + "from " // , `` ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java new file mode 100644 index 0000000000000..4206adf1492fd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.test.AbstractWireTestCase; +import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +public class LocalSupplierTests extends AbstractWireTestCase { + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + @Override + protected LocalSupplier 
copyInstance(LocalSupplier instance, TransportVersion version) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(version); + instance.writeTo(new PlanStreamOutput(output, PlanNameRegistry.INSTANCE, null)); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(version); + return LocalSupplier.readFrom(new PlanStreamInput(in, PlanNameRegistry.INSTANCE, getNamedWriteableRegistry(), null)); + } + } + } + + @Override + protected LocalSupplier createTestInstance() { + return randomBoolean() ? LocalSupplier.EMPTY : randomNonEmpty(); + } + + private LocalSupplier randomNonEmpty() { + return LocalSupplier.of(randomList(1, 10, LocalSupplierTests::randomBlock).toArray(Block[]::new)); + } + + @Override + protected LocalSupplier mutateInstance(LocalSupplier instance) throws IOException { + Block[] blocks = instance.get(); + if (blocks.length > 0 && randomBoolean()) { + if (randomBoolean()) { + return LocalSupplier.EMPTY; + } + return LocalSupplier.of(Arrays.copyOf(blocks, blocks.length - 1, Block[].class)); + } + blocks = Arrays.copyOf(blocks, blocks.length + 1, Block[].class); + blocks[blocks.length - 1] = randomBlock(); + return LocalSupplier.of(blocks); + } + + private static Block randomBlock() { + int len = between(1, 1000); + try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(len)) { + for (int i = 0; i < len; i++) { + ints.appendInt(randomInt()); + } + return ints.build(); + } + } + + @Override + protected boolean shouldBeSame(LocalSupplier newInstance) { + return newInstance.get().length == 0; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Block.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index 
45d57b2fa411e..c9c5091db2894 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -86,7 +86,7 @@ protected DataNodeRequest createTestInstance() { ); DataNodeRequest request = new DataNodeRequest( sessionId, - EsqlConfigurationSerializationTests.randomConfiguration(query), + EsqlConfigurationSerializationTests.randomConfiguration(query, EsqlConfigurationSerializationTests.randomTables()), randomAlphaOfLength(10), shardIds, aliasFilters, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java index 3e91321651928..41c39e88b943e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java @@ -53,10 +53,10 @@ private static QueryPragmas randomQueryPragmas() { public static EsqlConfiguration randomConfiguration() { int len = randomIntBetween(1, 300) + (frequently() ? 
0 : QUERY_COMPRESS_THRESHOLD_CHARS); - return randomConfiguration(randomRealisticUnicodeOfLength(len)); + return randomConfiguration(randomRealisticUnicodeOfLength(len), randomTables()); } - public static EsqlConfiguration randomConfiguration(String query) { + public static EsqlConfiguration randomConfiguration(String query, Map> tables) { var zoneId = randomZone(); var locale = randomLocale(random()); var username = randomAlphaOfLengthBetween(1, 10); @@ -75,11 +75,11 @@ public static EsqlConfiguration randomConfiguration(String query) { defaultTruncation, query, profile, - randomTables() + tables ); } - static Map> randomTables() { + public static Map> randomTables() { if (randomBoolean()) { return Map.of(); } diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 0aef8601ffcc6..3e2171d0654d5 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -36,6 +38,12 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.semantic_text_feature_flag_enabled', 'true' + } +} + tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java index db5e62a367ab3..d475fd099d4ac 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java @@ -59,16 +59,16 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("azureopenai", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) 
get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // Inference on old cluster model @@ -77,7 +77,7 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java index c73827dba2cbb..c889d8f9b312a 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java @@ -71,7 +71,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(oldClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); @@ -83,7 +83,7 @@ public 
void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdInt8, CohereEmbeddingType.BYTE); assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); @@ -91,7 +91,7 @@ public void testCohereEmbeddings() throws IOException { // An upgraded node will report the embedding type as byte, an old node int8 assertThat(embeddingType, Matchers.is(oneOf("int8", "byte"))); - configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -99,7 +99,7 @@ public void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); @@ -116,7 +116,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdByte, embeddingConfigByte(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - 
configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); @@ -129,7 +129,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); // int8 rewritten to byte @@ -141,7 +141,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(upgradedClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -179,12 +179,12 @@ public void testRerank() throws IOException { if (isOldCluster()) { put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.RERANK, 
oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); @@ -195,7 +195,7 @@ public void testRerank() throws IOException { } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); @@ -206,7 +206,7 @@ public void testRerank() throws IOException { // New endpoint put(upgradedClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java index 718678f97f37f..899a02776195d 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java @@ -63,18 +63,18 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - var 
configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); // Inference on old cluster model @@ -83,7 +83,7 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -110,17 +110,17 @@ public void testElser() throws IOException { if (isOldCluster()) { put(oldClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", 
configs.get(0).get("service")); assertElser(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); var taskSettings = (Map) configs.get(0).get("task_settings"); assertThat(taskSettings.keySet(), empty()); @@ -129,7 +129,7 @@ public void testElser() throws IOException { // New endpoint put(upgradedClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index fe08db9b94b89..ecfec2304c8a1 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; +import org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; import java.io.IOException; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { public 
InferenceUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java index 4e8e1c845b070..bfdcb0e0d5ed4 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java @@ -65,12 +65,12 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ -80,7 +80,7 @@ public void testOpenAiEmbeddings() throws IOException { assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // model id is moved to service settings 
assertThat(serviceSettings, hasEntry("model_id", "text-embedding-ada-002")); @@ -94,7 +94,7 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -122,12 +122,12 @@ public void testOpenAiCompletions() throws IOException { if (isOldCluster()) { put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertCompletionInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); @@ -137,7 +137,7 @@ public void testOpenAiCompletions() throws IOException { assertCompletionInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ -146,7 +146,7 @@ public void testOpenAiCompletions() throws IOException { 
assertCompletionInference(oldClusterId); put(upgradedClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java index 39eaaceae08bc..73ba286c9031a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; @@ -32,4 +33,10 @@ public ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings) { + var overriddenModel = AzureOpenAiCompletionModel.of(model, taskSettings); + return new AzureOpenAiCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java index 49d1ce61b12dd..f45c1d797085e 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.inference.external.action.azureopenai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; public interface AzureOpenAiActionVisitor { ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(AzureOpenAiCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java new file mode 100644 index 0000000000000..d38d02ef9620f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AzureOpenAiCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final AzureOpenAiCompletionRequestManager requestCreator; + private final Sender sender; + + public AzureOpenAiCompletionAction(Sender sender, AzureOpenAiCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new AzureOpenAiCompletionRequestManager(model, serviceComponents.threadPool()); + this.errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI completion"); 
+ } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); + return; + } + + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("Azure OpenAI completion only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java deleted file mode 100644 index db1f91cc751ee..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.azureopenai; - -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; - -import java.util.Objects; - -public record AzureOpenAiAccount( - String resourceName, - String deploymentId, - String apiVersion, - @Nullable SecureString apiKey, - @Nullable SecureString entraId -) { - - public AzureOpenAiAccount { - Objects.requireNonNull(resourceName); - Objects.requireNonNull(deploymentId); - Objects.requireNonNull(apiVersion); - Objects.requireNonNullElse(apiKey, entraId); - } - - public static AzureOpenAiAccount fromModel(AzureOpenAiEmbeddingsModel model) { - return new AzureOpenAiAccount( - model.getServiceSettings().resourceName(), - model.getServiceSettings().deploymentId(), - model.getServiceSettings().apiVersion(), - model.getSecretSettings().apiKey(), - model.getSecretSettings().entraId() - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java new file mode 100644 index 0000000000000..2811155f6f357 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.azureopenai.AzureOpenAiCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class AzureOpenAiCompletionRequestManager extends AzureOpenAiRequestManager { + + private static final Logger logger = LogManager.getLogger(AzureOpenAiCompletionRequestManager.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final AzureOpenAiCompletionModel model; + + private static ResponseHandler createCompletionHandler() { + return new AzureOpenAiResponseHandler("azure openai completion", AzureOpenAiCompletionResponseEntity::fromResponse); + } + + public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = Objects.requireNonNull(model); + } + + @Override + public Runnable create( + @Nullable String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + 
AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java new file mode 100644 index 0000000000000..8854dc7950365 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class AzureOpenAiCompletionRequest implements AzureOpenAiRequest { + + private final List input; + + private final URI uri; + + private final AzureOpenAiCompletionModel model; + + public AzureOpenAiCompletionRequest(List input, AzureOpenAiCompletionModel model) { + this.input = input; + this.model = Objects.requireNonNull(model); + this.uri = model.getUri(); + } + + @Override + public HttpRequest 
createHttpRequest() { + var httpPost = new HttpPost(uri); + var requestEntity = Strings.toString(new AzureOpenAiCompletionRequestEntity(input, model.getTaskSettings().user())); + + ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public Request truncate() { + // No truncation for Azure OpenAI completion + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for Azure OpenAI completion + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java new file mode 100644 index 0000000000000..86614ef32855f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record AzureOpenAiCompletionRequestEntity(List messages, @Nullable String user) implements ToXContentObject { + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String MESSAGES_FIELD = "messages"; + + private static final String ROLE_FIELD = "role"; + + private static final String CONTENT_FIELD = "content"; + + private static final String USER_FIELD = "user"; + + public AzureOpenAiCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + + builder.endArray(); + + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f60d0130a01b6..00af244fca913 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -7,15 +7,10 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; -import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiAccount; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; @@ -24,24 +19,15 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; - public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { - private static final String MISSING_AUTHENTICATION_ERROR_MESSAGE = - "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; private final Truncator truncator; - private final AzureOpenAiAccount account; private final Truncator.TruncationResult truncationResult; private final URI uri; private final AzureOpenAiEmbeddingsModel model; public AzureOpenAiEmbeddingsRequest(Truncator truncator, Truncator.TruncationResult input, AzureOpenAiEmbeddingsModel model) { this.truncator = Objects.requireNonNull(truncator); - this.account = AzureOpenAiAccount.fromModel(model); this.truncationResult = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); this.uri = model.getUri(); @@ -62,21 +48,7 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); - httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); - - var entraId = model.getSecretSettings().entraId(); - var apiKey = model.getSecretSettings().apiKey(); - - if (entraId != null && entraId.isEmpty() == false) { - httpPost.setHeader(createAuthBearerHeader(entraId)); - } else if (apiKey != null && apiKey.isEmpty() == false) { - httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); - } else { - // should never happen due to the checks on the secret settings, but just in case - ValidationException validationException = new ValidationException(); - validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); - throw validationException; - } + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java index edb7c70b3903e..79a0e4a4eba33 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java @@ -7,6 +7,40 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -public interface AzureOpenAiRequest extends Request {} +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; + +public interface AzureOpenAiRequest extends Request { + + String MISSING_AUTHENTICATION_ERROR_MESSAGE = + "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; + + static void decorateWithAuthHeader(HttpPost httpPost, AzureOpenAiSecretSettings secretSettings) { + httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); + + var entraId = secretSettings.entraId(); + var apiKey = secretSettings.apiKey(); + + if (entraId != null && entraId.isEmpty() == false) { + httpPost.setHeader(createAuthBearerHeader(entraId)); + } else if (apiKey != null && apiKey.isEmpty() == false) { + httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); + } else { + // should never happen due to the checks on the secret settings, but just in case + ValidationException validationException = new ValidationException(); + validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); + throw validationException; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java index 16a02a4c06c1c..6e657640e27ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java @@ -13,6 +13,8 @@ public class AzureOpenAiUtils { public static final String OPENAI_PATH = "openai"; public static final String DEPLOYMENTS_PATH = "deployments"; public static final String EMBEDDINGS_PATH = "embeddings"; + public static final String CHAT_PATH = "chat"; + public static final String COMPLETIONS_PATH = "completions"; public static final String API_VERSION_PARAMETER = "api-version"; public static final String API_KEY_HEADER = "api-key"; diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 42fd0ddc812ec..55a7f35710cf6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -39,7 +39,7 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { XContentParser.Token token = parser.nextToken(); - while (token != null && token != XContentParser.Token.END_OBJECT) { + while (token != null) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java new file mode 100644 index 0000000000000..ca1df7027cb40 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class AzureOpenAiCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Azure OpenAI completions response"; + + /** + * Parses the Azure OpenAI completion response. + * For a request like: + * + *
+     *     
+     *         {
+     *             "inputs": "Please summarize this text: some text"
+     *         }
+     *     
+     * 
+ * + * The response would look like: + * + *
+     *     
+     *         {
+     *     "choices": [
+     *         {
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             },
+     *             "finish_reason": "stop",
+     *             "index": 0,
+     *             "logprobs": null,
+     *             "message": {
+     *                 "content": "response",
+     *                 "role": "assistant"
+     *             }
+     *         }
+     *     ],
+     *     "created": 1714982782,
+     *     "id": "...",
+     *     "model": "gpt-4",
+     *     "object": "chat.completion",
+     *     "prompt_filter_results": [
+     *         {
+     *             "prompt_index": 0,
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             }
+     *         }
+     *     ],
+     *     "system_fingerprint": null,
+     *     "usage": { ... }
+     * }
+     *     
+     * 
+ */ + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 5e50229e25643..708088af54cc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai; +import org.apache.http.client.utils.URIBuilder; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,11 +15,18 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public abstract class AzureOpenAiModel extends Model { protected URI uri; @@ -50,6 +58,30 @@ protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettin public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); + public final URI buildUriString() throws URISyntaxException { + return AzureOpenAiModel.buildUri(resourceName(), deploymentId(), apiVersion(), operationPathSegments()); + } + + // use only for testing directly + public static URI buildUri(String resourceName, String deploymentId, String apiVersion, String... 
pathSegments) + throws URISyntaxException { + String hostname = format("%s.%s", resourceName, AzureOpenAiUtils.HOST_SUFFIX); + + return new URIBuilder().setScheme("https") + .setHost(hostname) + .setPathSegments(createPathSegmentsList(deploymentId, pathSegments)) + .addParameter(AzureOpenAiUtils.API_VERSION_PARAMETER, apiVersion) + .build(); + } + + private static List createPathSegmentsList(String deploymentId, String[] pathSegments) { + List pathSegmentsList = new ArrayList<>( + List.of(AzureOpenAiUtils.OPENAI_PATH, AzureOpenAiUtils.DEPLOYMENTS_PATH, deploymentId) + ); + pathSegmentsList.addAll(Arrays.asList(pathSegments)); + return pathSegmentsList; + } + public URI getUri() { return uri; } @@ -62,4 +94,13 @@ public void setUri(URI newUri) { public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } + + // TODO: can be inferred directly from modelConfigurations.getServiceSettings(); will be addressed with separate refactoring + public abstract String resourceName(); + + public abstract String deploymentId(); + + public abstract String apiVersion(); + + public abstract String[] operationPathSegments(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index f871fe6c080a1..48e45f368bfe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -25,12 +25,16 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalSecureString; -public record AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable 
SecureString entraId) implements SecretSettings { +public class AzureOpenAiSecretSettings implements SecretSettings { public static final String NAME = "azure_openai_secret_settings"; public static final String API_KEY = "api_key"; public static final String ENTRA_ID = "entra_id"; + private final SecureString entraId; + + private final SecureString apiKey; + public static AzureOpenAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { return null; @@ -59,14 +63,24 @@ public static AzureOpenAiSecretSettings fromMap(@Nullable Map ma return new AzureOpenAiSecretSettings(secureApiToken, secureEntraId); } - public AzureOpenAiSecretSettings { + public AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) { Objects.requireNonNullElse(apiKey, entraId); + this.apiKey = apiKey; + this.entraId = entraId; } public AzureOpenAiSecretSettings(StreamInput in) throws IOException { this(in.readOptionalSecureString(), in.readOptionalSecureString()); } + public SecureString apiKey() { + return apiKey; + } + + public SecureString entraId() { + return entraId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -98,4 +112,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalSecureString(apiKey); out.writeOptionalSecureString(entraId); } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiSecretSettings that = (AzureOpenAiSecretSettings) object; + return Objects.equals(entraId, that.entraId) && Objects.equals(apiKey, that.apiKey); + } + + @Override + public int hashCode() { + return Objects.hash(entraId, apiKey); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index c6b97e22b099d..e0e48ab20a86b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; @@ -121,19 +122,23 @@ private static AzureOpenAiModel createModel( String failureMessage, ConfigurationParseContext context ) { - if (taskType == TaskType.TEXT_EMBEDDING) { - return new AzureOpenAiEmbeddingsModel( - inferenceEntityId, - taskType, - NAME, - serviceSettings, - taskSettings, - secretSettings, - context - ); + switch (taskType) { + case TEXT_EMBEDDING -> { + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + } + case COMPLETION -> { + return new AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } - - throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java new file mode 100644 index 0000000000000..05cb663453542 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import java.net.URISyntaxException; +import java.util.Map; + +public class AzureOpenAiCompletionModel extends AzureOpenAiModel { + + public static AzureOpenAiCompletionModel of(AzureOpenAiCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap(taskSettings); + return new AzureOpenAiCompletionModel(model, AzureOpenAiCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map 
serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), + AzureOpenAiSecretSettings.fromMap(secrets) + ); + } + + // Should only be used directly for testing + AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AzureOpenAiCompletionServiceSettings serviceSettings, + AzureOpenAiCompletionTaskSettings taskSettings, + @Nullable AzureOpenAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUriString(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionServiceSettings serviceSettings) { + super(originalModel, serviceSettings); + } + + private AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public AzureOpenAiCompletionServiceSettings getServiceSettings() { + return (AzureOpenAiCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AzureOpenAiCompletionTaskSettings getTaskSettings() { + return (AzureOpenAiCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public AzureOpenAiSecretSettings getSecretSettings() { + return (AzureOpenAiSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public String resourceName() { + return getServiceSettings().resourceName(); + } + + @Override + public String deploymentId() { + return 
getServiceSettings().deploymentId(); + } + + @Override + public String apiVersion() { + return getServiceSettings().apiVersion(); + } + + @Override + public String[] operationPathSegments() { + return new String[] { AzureOpenAiUtils.CHAT_PATH, AzureOpenAiUtils.COMPLETIONS_PATH }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..5dd42bb1b911f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; + +public record AzureOpenAiCompletionRequestTaskSettings(@Nullable String user) { + + public static final AzureOpenAiCompletionRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiCompletionRequestTaskSettings(null); + + public static AzureOpenAiCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionRequestTaskSettings(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java new file mode 100644 index 0000000000000..4100ce7358a3f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.DEPLOYMENT_ID; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.RESOURCE_NAME; + +public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + AzureOpenAiRateLimitServiceSettings { + + public static final String NAME = "azure_openai_completions_service_settings"; + + /** + * Rate limit documentation can be found here: + * + * Limits per region per model id + * https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits + * + * How to change the limits + * 
https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest + * + * Blog giving some examples + * https://techcommunity.microsoft.com/t5/fasttrack-for-azure/optimizing-azure-openai-a-guide-to-limits-quotas-and-best/ba-p/4076268 + * + * According to the docs 1000 tokens per minute (TPM) = 6 requests per minute (RPM). The limits change depending on the region + * and model. The lowest chat completions limit is 20k TPM, so we'll default to that. + * Calculation: 20K TPM = 20 * 6 = 120 requests per minute (used `francecentral` and `gpt-4` as basis for the calculation). + */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static AzureOpenAiCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + var settings = fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionServiceSettings(settings); + } + + private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap( + Map map, + ValidationException validationException + ) { + String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException); + String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + + return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings); + } + + private record CommonFields(String resourceName, String deploymentId, String apiVersion, RateLimitSettings rateLimitSettings) {} + + private final String resourceName; + 
private final String deploymentId; + private final String apiVersion; + + private final RateLimitSettings rateLimitSettings; + + public AzureOpenAiCompletionServiceSettings( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.resourceName = resourceName; + this.deploymentId = deploymentId; + this.apiVersion = apiVersion; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public AzureOpenAiCompletionServiceSettings(StreamInput in) throws IOException { + resourceName = in.readString(); + deploymentId = in.readString(); + apiVersion = in.readString(); + rateLimitSettings = new RateLimitSettings(in); + } + + private AzureOpenAiCompletionServiceSettings(AzureOpenAiCompletionServiceSettings.CommonFields fields) { + this(fields.resourceName, fields.deploymentId, fields.apiVersion, fields.rateLimitSettings); + } + + public String resourceName() { + return resourceName; + } + + public String deploymentId() { + return deploymentId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(RESOURCE_NAME, resourceName); + builder.field(DEPLOYMENT_ID, deploymentId); + builder.field(API_VERSION, apiVersion); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return 
TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(resourceName); + out.writeString(deploymentId); + out.writeString(apiVersion); + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionServiceSettings that = (AzureOpenAiCompletionServiceSettings) object; + return Objects.equals(resourceName, that.resourceName) + && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(resourceName, deploymentId, apiVersion, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java new file mode 100644 index 0000000000000..6e9f77e1ade21 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; + +public class AzureOpenAiCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "azure_openai_completion_task_settings"; + + public static final String USER = "user"; + + public static AzureOpenAiCompletionTaskSettings fromMap(Map<String, Object> map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionTaskSettings(user); + } + + private final String user; + + public static AzureOpenAiCompletionTaskSettings of( + AzureOpenAiCompletionTaskSettings originalSettings, + AzureOpenAiCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new AzureOpenAiCompletionTaskSettings(userToUse); + } + + public AzureOpenAiCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public AzureOpenAiCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (user != null) { + builder.field(USER, user); + } + } + builder.endObject(); + return builder; + } + + public String user() { + return user; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionTaskSettings that = (AzureOpenAiCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 93d1e31a3bed1..377bb33f58619 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -19,12 +18,9 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -import java.net.URI; import java.net.URISyntaxException; import java.util.Map; -import static org.elasticsearch.core.Strings.format; - public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { public static AzureOpenAiEmbeddingsModel of(AzureOpenAiEmbeddingsModel model, Map taskSettings) { @@ -70,7 +66,7 @@ public AzureOpenAiEmbeddingsModel( serviceSettings ); try { - this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); + this.uri = buildUriString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -104,17 +100,24 @@ public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map { - builder.startObject(); - - toXContentFragmentOfExposedFields(builder, params); - - builder.endObject(); - return builder; - }; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 33136c339e757..4c39d35e2ff03 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -18,9 +18,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import 
org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class CohereServiceSettings implements ServiceSettings, CohereRateLimitServiceSettings { +public class CohereServiceSettings extends FilteredXContentObject implements ServiceSettings, CohereRateLimitServiceSettings { public static final String NAME = "cohere_service_settings"; public static final String OLD_MODEL_ID_FIELD = "model"; @@ -173,6 +173,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public XContentBuilder toXContentFragment(XContentBuilder builder, Params params) throws IOException { + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { if (uri != null) { builder.field(URL, uri.toString()); } @@ -188,16 +196,10 @@ public XContentBuilder toXContentFragment(XContentBuilder builder, Params params if (modelId != null) { builder.field(MODEL_ID, modelId); } - rateLimitSettings.toXContent(builder, params); return builder; } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 7d78091a20106..00a406a7a3efa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -16,11 +16,11 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.EnumSet; @@ -30,7 +30,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; -public class CohereEmbeddingsServiceSettings implements ServiceSettings { +public class CohereEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_embeddings_service_settings"; static final String EMBEDDING_TYPE = "embedding_type"; @@ -160,8 +160,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + 
commonSettings.toXContentFragmentOfExposedFields(builder, params); + builder.field(EMBEDDING_TYPE, elementType()); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java index 19538be3734ba..6a74fe533e3db 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -13,16 +13,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.Map; import java.util.Objects; -public class CohereRerankServiceSettings implements ServiceSettings { +public class CohereRerankServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_rerank_service_settings"; public static CohereRerankServiceSettings fromMap(Map map, ConfigurationParseContext parseContext) { @@ -62,8 +62,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + 
commonSettings.toXContentFragmentOfExposedFields(builder, params); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index beb9035640024..af2c433663ac4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -17,8 +17,8 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class HuggingFaceServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceServiceSettings extends FilteredXContentObject implements ServiceSettings, HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_service_settings"; // At the time of writing HuggingFace hasn't posted the default rate limit for inference endpoints so the value here is only a guess @@ -118,6 +118,14 @@ public HuggingFaceServiceSettings(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + 
toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(URL, uri.toString()); if (similarity != null) { builder.field(SIMILARITY, similarity); @@ -128,14 +136,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); - return builder; - } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index a48ccd14fdb66..1f337de450ef9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -28,7 +28,10 @@ import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; -public class HuggingFaceElserServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceElserServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; static final String URL = "url"; @@ -56,7 +59,8 @@ public HuggingFaceElserServiceSettings(String url) { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; } - private HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { + // default for testing + HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { this.uri = Objects.requireNonNull(uri); this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); } @@ -88,8 +92,7 @@ public int maxInputTokens() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(URL, uri.toString()); - builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + toXContentFragmentOfExposedFields(builder, params); rateLimitSettings.toXContent(builder, params); builder.endObject(); @@ -97,8 +100,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 7703476a14dea..5105bb59e048f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -15,9 +15,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ /** * Defines the service settings for interacting with OpenAI's chat completion models. 
*/ -public class OpenAiChatCompletionServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiChatCompletionServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_completion_service_settings"; @@ -141,24 +141,29 @@ public Integer maxInputTokens() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - { - builder.field(MODEL_ID, modelId); + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); - if (uri != null) { - builder.field(URL, uri.toString()); - } + builder.endObject(); + return builder; + } - if (organizationId != null) { - builder.field(ORGANIZATION, organizationId); - } + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(MODEL_ID, modelId); - if (maxInputTokens != null) { - builder.field(MAX_INPUT_TOKENS, maxInputTokens); - } + if (uri != null) { + builder.field(URL, uri.toString()); + } + + if (organizationId != null) { + builder.field(ORGANIZATION, organizationId); + } + + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); return builder; } @@ -184,11 +189,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; - } - @Override public boolean equals(Object object) { if (this == object) return true; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 
373704af37fcd..b3b94f7584563 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ValidationException; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; @@ -25,7 +23,6 @@ * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse */ public record OpenAiEmbeddingsRequestTaskSettings(@Nullable String user) { - private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsRequestTaskSettings.class); public static final OpenAiEmbeddingsRequestTaskSettings EMPTY_SETTINGS = new OpenAiEmbeddingsRequestTaskSettings(null); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 8edbb7bc14f2c..690e8f0ddd947 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -17,10 +17,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -44,7 +44,7 @@ /** * Defines the service settings for interacting with OpenAI's text embedding models. */ -public class OpenAiEmbeddingsServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_service_settings"; @@ -261,6 +261,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); if (dimensionsSetByUser != null) { builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); @@ -270,7 +271,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(MODEL_ID, modelId); if (uri != null) { builder.field(URL, uri.toString()); @@ -287,19 +289,8 @@ private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params p if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - } - @Override - public ToXContentObject getFilteredXContentObject() { - return (builder, params) -> { - builder.startObject(); - - toXContentFragmentOfExposedFields(builder, params); - - builder.endObject(); - return builder; - 
}; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java new file mode 100644 index 0000000000000..655e50e073972 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.settings; + +import org.elasticsearch.inference.FilteredXContent; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public abstract class FilteredXContentObject implements FilteredXContent { + @Override + public ToXContentObject getFilteredXContentObject() { + return (builder, params) -> { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + }; + } + + protected abstract XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) + throws IOException; +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java index 4bdba67beec17..567e26101283e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -45,8 +46,9 @@ import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -54,6 +56,11 @@ public class AzureOpenAiActionCreatorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private static final Settings ZERO_TIMEOUT_SETTINGS = buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + 
TimeValue.timeValueSeconds(0) + ); private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; private HttpClientManager clientManager; @@ -103,7 +110,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { var model = createModel("resource", "deployment", "apiversion", "orig_user", "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -116,7 +123,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -153,7 +160,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings = createRequestTaskSettingsMap(null); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -166,7 +173,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti 
validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), null); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), null); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -174,12 +181,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() throws IOException { // timeout as zero for no retries - var settings = buildSettingsWithRetryFields( - TimeValue.timeValueMillis(1), - TimeValue.timeValueMinutes(1), - TimeValue.timeValueSeconds(0) - ); - var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); try (var sender = senderFactory.createSender("test_service")) { sender.start(); @@ -209,7 +211,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -226,7 +228,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); + 
validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -281,7 +283,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -295,13 +297,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -357,7 +359,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings 
= createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -371,13 +373,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -416,7 +418,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var model = createModel("resource", "deployment", "apiversion", null, false, 1, null, null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -429,13 +431,186 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("sup"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, 
List.of("sup"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } } - private void validateRequestMapWithUser(Map requestMap, List input, @Nullable String user) { + public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var originalUser = "original_user"; + var overriddenUser = "overridden_user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var model = createCompletionModel("resource", "deployment", "apiversion", originalUser, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var taskSettingsWithUserOverride = createRequestTaskSettingsMap(overriddenUser); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + 
validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), overriddenUser); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(null); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), null); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + 
public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // "choices" missing + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + var userOverride = "overridden_user"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(userOverride); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in Azure OpenAI completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + validateRequestWithApiKey(webServer.requests().get(0), apiKey); + + var requestMap = 
entityAsMap(webServer.requests().get(0).getBody()); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), userOverride); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + private void validateEmbeddingsRequestMapWithUser(Map requestMap, List input, @Nullable String user) { var expectedSize = user == null ? 1 : 2; assertThat(requestMap.size(), is(expectedSize)); @@ -446,6 +621,24 @@ private void validateRequestMapWithUser(Map requestMap, List requestMap, List input, @Nullable String user) { + assertThat("input for completions can only be of size 1", input.size(), equalTo(1)); + + var expectedSize = user == null ? 2 : 3; + + assertThat(requestMap.size(), is(expectedSize)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input.get(0))); + + if (user != null) { + assertThat(requestMap.get("user"), is(user)); + } + } + + @SuppressWarnings("unchecked") + public static String getContentOfMessageInRequestMap(Map requestMap) { + return ((Map) ((List) requestMap.get("messages")).get(0)).get("content").toString(); + } + private void validateRequestWithApiKey(MockRequest request, String apiKey) { assertNull(request.getUri().getQuery()); assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java new file mode 100644 index 0000000000000..96127841c17a8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static 
org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + 
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var user = "user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var action = createAction("resource", "deployment", "apiversion", user, apiKey, sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), is(XContentType.JSON.mediaType())); + assertThat(request.getHeader(AzureOpenAiUtils.API_KEY_HEADER), is(apiKey)); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(completionInput)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + 
doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + private AzureOpenAiCompletionAction createAction( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable String user, + String apiKey, + Sender sender, + String inferenceEntityId + ) { + try { + var model = createCompletionModel(resourceName, deploymentId, apiVersion, user, apiKey, null, inferenceEntityId); + model.setUri(new URI(getUrl(webServer))); + return new AzureOpenAiCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } catch (URISyntaxException e) { + throw new 
RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 98eff32f72983..ff2448803d7ce 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -45,7 +45,7 @@ import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -101,7 +101,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -154,7 
+154,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings = createRequestTaskSettingsMap(null); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -206,7 +206,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOExce var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -265,7 +265,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -595,7 +595,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = 
createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -678,7 +678,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -746,7 +746,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { // truncated to 1 token = 3 characters var model = createModel(getUrl(webServer), "org", "secret", "model", "user", 1); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index b802403dcd28d..e28c3e817b351 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -31,7 +31,6 @@ import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import org.hamcrest.CoreMatchers; import org.junit.After; import org.junit.Before; @@ -272,8 +271,8 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), CoreMatchers.is("OpenAI completions only accepts 1 input")); - assertThat(thrownException.status(), CoreMatchers.is(RestStatus.BAD_REQUEST)); + assertThat(thrownException.getMessage(), is("OpenAI completions only accepts 1 input")); + assertThat(thrownException.status(), is(RestStatus.BAD_REQUEST)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java new file mode 100644 index 0000000000000..2d37f273e1de2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiRequest.MISSING_AUTHENTICATION_ERROR_MESSAGE; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AzureOpenAiRequestTests extends ESTestCase { + + public void testDecorateWithAuthHeader_apiKeyPresent() { + var apiKey = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(apiKey, null); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var apiKeyHeader = httpPost.getFirstHeader(API_KEY_HEADER); + + assertThat(apiKeyHeader.getValue(), equalTo(apiKey.toString())); + } + + public void testDecorateWithAuthHeader_entraIdPresent() { + var entraId = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(null, entraId); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var authHeader = httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION); + + assertThat(authHeader.getValue(), equalTo("Bearer " + entraId)); + } + + public void testDecorateWithAuthHeader_entraIdAndApiKeyMissing_throwMissingAuthValidationException() { + 
var httpPost = new HttpPost(); + var secretSettingsMock = mock(AzureOpenAiSecretSettings.class); + + when(secretSettingsMock.entraId()).thenReturn(null); + when(secretSettingsMock.apiKey()).thenReturn(null); + + ValidationException exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettingsMock) + ); + assertTrue(exception.getMessage().contains(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..7647a4983f4be --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequestEntity; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class AzureOpenAiCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesSingleMessage_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1}""")); + } + + public void testXContent_WritesSingleMessage_WritesUserWhenItIsDefined() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1,"user":"user"}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java new file mode 100644 index 0000000000000..048d4ea16d56f --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + protected AzureOpenAiCompletionRequest createRequest( + String resource, + String deployment, + String apiVersion, + String apiKey, + String entraId, + String input, + String user + ) { + var completionModel = AzureOpenAiCompletionModelTests.createCompletionModel( + resource, + deployment, + apiVersion, + user, + apiKey, + entraId, + "id" + ); + + return new AzureOpenAiCompletionRequest(List.of(input), 
completionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java similarity index 96% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java index 14283ed53eed9..f732a01c893e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java @@ -5,13 +5,14 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequestEntity; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java similarity index 73% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java index 88e6880b72f0b..bbd8a49d65f46 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; @@ -14,56 +14,69 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_WithApiKeyDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", "apikey", null, "abc", "user"); + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", 
"apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is("apikey")); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } - public void testCreateRequest_WithEntraIdDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", null, "entraId", "abc", "user"); + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - 
assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer entraId")); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { @@ -87,7 +100,7 @@ public void testIsTruncated_ReturnsTrue() { assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public static AzureOpenAiEmbeddingsRequest createRequest( + public AzureOpenAiEmbeddingsRequest createRequest( String resourceName, String deploymentId, String apiVersion, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 4f7cd9ea89a14..897c648eb942f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -106,6 +106,24 @@ public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws } } + public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOException { + var json = """ + { + "key": { + "foo": "bar" + }, + "target": "value" + } + """; + + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "target", errorFormat); + assertEquals("value", parser.text()); + } + } + public 
void testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..3afe4bd439e0f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "choices": [ + { + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + 
} + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion", + "prompt_filter_results": [ + { + "prompt_index": 0, + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + } + } + ], + "usage": { + "completion_tokens": 138, + "prompt_tokens": 11, + "total_tokens": 149 + } + }"""; + + ChatCompletionResults chatCompletionResults = AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + + ChatCompletionResults.Result result = chatCompletionResults.getResults().get(0); + assertThat(result.asMap().get(result.getResultsField()), is("response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotAnArray() { + String responseJson = """ + { + "choices": { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + 
"content": "response", + "role": "assistant" + } + }, + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "not_message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": "string" + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } + +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 18f702014e2d8..080602e8fd245 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -74,7 +74,7 @@ public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, @@ -112,7 +112,7 @@ public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { }, "logprobs": null, "finish_reason": "stop" - }, + } }, "usage": { "prompt_tokens": 46, @@ -153,7 +153,7 @@ public void testFromResponse_FailsWhenMessageDoesNotExist() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java index 97fa6efc962bb..d2b83d7b14e2b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.HashMap; @@ -119,7 
+118,7 @@ public void testToXContext_WritesApiKeyOnlyWhenEntraIdIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"apikey\"}", API_KEY); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOException { @@ -129,7 +128,7 @@ public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"entraid\"}", ENTRA_ID); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java new file mode 100644 index 0000000000000..93d948a5bdcf3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AzureOpenAiCompletionModelTests extends ESTestCase { + + public void testOverrideWith_UpdatedTaskSettings_OverridesUser() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiVersion = "api version"; + var apiKey = "api key"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var user = "user"; + var userOverride = "user override"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var requestTaskSettingsMap = taskSettingsMap(userOverride); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat( + overriddenModel, + equalTo(createCompletionModel(resource, deploymentId, apiVersion, userOverride, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testOverrideWith_EmptyMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + Map<String, Object> requestTaskSettingsMap = Map.of(); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap_OverridesNothing() { + var model = createCompletionModel("resource", 
"deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var overriddenModel = AzureOpenAiCompletionModel.of(model, null); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_UpdatedServiceSettings_OverridesApiVersion() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var apiVersion = "api version"; + var updatedApiVersion = "updated api version"; + + var updatedServiceSettings = new AzureOpenAiCompletionServiceSettings(resource, deploymentId, updatedApiVersion, null); + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var overriddenModel = new AzureOpenAiCompletionModel(model, updatedServiceSettings); + + assertThat( + overriddenModel, + is(createCompletionModel(resource, deploymentId, updatedApiVersion, user, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + } + + public static AzureOpenAiCompletionModel createModelWithRandomValues() { + return createCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + + public static AzureOpenAiCompletionModel createCompletionModel( + String 
resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? new SecureString(entraId.toCharArray()) : null; + + return new AzureOpenAiCompletionModel( + inferenceEntityId, + TaskType.COMPLETION, + "service", + new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null), + new AzureOpenAiCompletionTaskSettings(user), + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + private Map<String, Object> taskSettingsMap(String user) { + Map<String, Object> taskSettingsMap = new HashMap<>(); + taskSettingsMap.put(AzureOpenAiServiceFields.USER, user); + return taskSettingsMap; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..51963c275a08a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenMapIsEmpty() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenMapDoesNotContainKnownFields() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsUser() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { + var exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..cbaa41c37958d --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + private static AzureOpenAiCompletionServiceSettings createRandom() { + var resourceName = randomAlphaOfLength(8); + var deploymentId = randomAlphaOfLength(8); + var apiVersion = randomAlphaOfLength(8); + + return new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null); + } + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var resourceName = "this-resource"; + var deploymentId = "this-deployment"; + var apiVersion = "2024-01-01"; + + var serviceSettings = AzureOpenAiCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + AzureOpenAiServiceFields.RESOURCE_NAME, + resourceName, + AzureOpenAiServiceFields.DEPLOYMENT_ID, + deploymentId, + AzureOpenAiServiceFields.API_VERSION, + apiVersion + ) + ) + ); + + assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, 
deploymentId, apiVersion, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); + } + + public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionServiceSettings::new; + } + + @Override + protected AzureOpenAiCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..7f0e730b8835c --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; +import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static AzureOpenAiCompletionTaskSettings createRandomWithUser() { + return new AzureOpenAiCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public static AzureOpenAiCompletionTaskSettings createRandom() { + var user = randomBoolean() ? 
randomAlphaOfLength(15) : null; + return new AzureOpenAiCompletionTaskSettings(user); + } + + public void testFromMap_WithUser() { + var user = "user"; + + assertThat( + new AzureOpenAiCompletionTaskSettings(user), + is(AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user)))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. [user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of( + taskSettings, + AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var user = "user"; + var userOverride = "user override"; + + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user))); + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, userOverride)) + ); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new AzureOpenAiCompletionTaskSettings(userOverride))); + } + + @Override + protected 
Writeable.Reader instanceReader() { + return AzureOpenAiCompletionTaskSettings::new; + } + + @Override + protected AzureOpenAiCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index aebc2240983f7..1747155623a98 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import java.net.URISyntaxException; import java.util.Map; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettingsTests.getAzureOpenAiRequestTaskSettingsMap; @@ -65,6 +66,35 @@ public void testCreateModel_FromUpdatedServiceSettings() { assertThat(overridenModel, is(createModel("resource", "deployment", "override_apiversion", "user", "api_key", null, "id"))); } + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + 
model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); + } + + public static AzureOpenAiEmbeddingsModel createModelWithRandomValues() { + return createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + public static AzureOpenAiEmbeddingsModel createModel( String resourceName, String deploymentId, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java index 3ff73e0f23656..0aef2a97ee0a1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettings; import java.util.HashMap; import java.util.Map; @@ -21,30 +20,30 @@ public class AzureOpenAiEmbeddingsRequestTaskSettingsTests extends ESTestCase { public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); - assertThat(settings, is(OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + 
assertThat(settings, is(AzureOpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); } public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); assertNull(settings.user()); } public void testFromMap_ReturnsUser() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { var exception = expectThrows( ValidationException.class, - () -> OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) + () -> AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) ); assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index 79bd28fd8b600..7c56ffad27c80 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -364,7 +363,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "rate_limit":{"requests_per_minute":2},"dimensions_set_by_user":true}""")); } @@ -385,12 +384,12 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":3},"dimensions_set_by_user":false}""")); } - public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() throws IOException { + public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_RateLimit() throws IOException { var entity = new AzureOpenAiEmbeddingsServiceSettings( "resource", "deployment", @@ -407,9 +406,9 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + 
assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ - "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":1}}""")); + "dimensions":1024,"max_input_tokens":512}""")); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index cb224f4089c0a..a010f63802052 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -260,7 +259,7 @@ public void testXContent_WritesModelId() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"modelId","rate_limit":{"requests_per_minute":1}}""")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 24edb9bfe87f0..1ac97642f0b85 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -16,6 +16,9 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -23,6 +26,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -314,6 +318,35 @@ public void testFromCohereOrDenseVectorEnumValues() { assertTrue(validation.validationErrors().isEmpty()); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3},"embedding_type":"byte"}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws 
IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "embedding_type":"byte"}""")); + } + @Override protected Writeable.Reader instanceReader() { return CohereEmbeddingsServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 64af547171af2..4f5d872f09eb8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -110,7 +109,7 @@ private static > String getValidValuesSortedAndCombined(EnumSe public void testXContent_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { var thrownException = expectThrows(AssertionError.class, () -> new CohereEmbeddingsTaskSettings(InputType.UNSPECIFIED, null)); - 
MatcherAssert.assertThat(thrownException.getMessage(), CoreMatchers.is("received invalid input type value [unspecified]")); + MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); } public void testOf_KeepsOriginalValuesWhenRequestSettingsAreNull_AndRequestInputTypeIsInvalid() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java new file mode 100644 index 0000000000000..cb30077fec174 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class CohereRerankServiceSettingsTests extends AbstractWireSerializingTestCase { + public static CohereRerankServiceSettings createRandom() { + var commonSettings = CohereServiceSettingsTests.createRandom(); + + return new CohereRerankServiceSettings(commonSettings); + } + + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + 
{"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereRerankServiceSettings::new; + } + + @Override + protected CohereRerankServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereRerankServiceSettings mutateInstance(CohereRerankServiceSettings instance) throws IOException { + return null; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model) { + return new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 8ebf5b1dfd615..d81c94a0dedda 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -173,6 +173,18 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","rate_limit":{"requests_per_minute":3}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceServiceSettings(ServiceUtils.createUri("url"), null, null, null, new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url"}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java index 525f701323511..eadefddecce70 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -11,6 +11,11 @@ import org.elasticsearch.common.ValidationException; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; import java.util.HashMap; @@ -82,6 +87,29 @@ public void testFromMap_InvalidUrl_ThrowsError() { ); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512,"rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceElserServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index 5531f1c14ddff..b9b4310699d07 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -211,6 +211,19 @@ public void testToXContent_DoesNotWriteOptionalValues() throws IOException { {"model_id":"model","rate_limit":{"requests_per_minute":500}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024, new RateLimitSettings(2)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model","url":"url","organization_id":"org",""" + """ + "max_input_tokens":1024}""")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiChatCompletionServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index b978e2563ece7..86b7f4421954d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -16,7 +16,7 @@ 
import java.util.Map; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -24,7 +24,7 @@ public class OpenAiEmbeddingsModelTests extends ESTestCase { public void testOverrideWith_OverridesUser() { var model = createModel("url", "org", "api_key", "model_name", null); - var requestTaskSettingsMap = getRequestTaskSettingsMap("user_override"); + var requestTaskSettingsMap = createRequestTaskSettingsMap("user_override"); var overriddenModel = OpenAiEmbeddingsModel.of(model, requestTaskSettingsMap); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index c95853e2d0128..6892e92d936e5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -32,7 +32,7 @@ public void testFromMap_ReturnsUser() { assertThat(settings.user(), is("user")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 92fb00a4061e2..c964d2643459d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.net.URI; @@ -366,7 +365,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":true}""")); } @@ -378,7 +377,7 @@ public void testToXContent_WritesDimensionsSetByUserFalse() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -390,7 +389,7 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" 
+ """ "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -403,9 +402,9 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() throws IOException { @@ -425,9 +424,9 @@ public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() thro filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":2000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } @Override diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771c..f42dcc6179d04 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.dra.DraResolvePlugin -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -115,12 +114,6 @@ artifacts { archives tasks.named("jar") } -if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.register("extractNativeLicenses", Copy) { dependsOn 
configurations.nativeBundle into "${buildDir}/extractedNativeLicenses" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 93dc8077196d7..bbe5bea691c35 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -190,9 +190,20 @@ Reader normalize(CharSequence str) { BytesRef subStr = maybeSubStr.get(); int numChars = UnicodeUtil.UTF8toUTF16(subStr.bytes, subStr.offset, subStr.length, reusableCharDecodeBuffer); normalizedCharPos += numChars; - if (numChars != end - startIter) { - addOffCorrectMap(normalizedCharPos, getLastCumulativeDiff() + end - startIter - numChars); + int charDelta = numChars - (end - startIter); // output length - input length + if (charDelta < 0) { + // normalised form is shorter + int lastDiff = getLastCumulativeDiff(); + addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + } else if (charDelta > 0) { + // inserted chars, add the offset in the output stream + int lastDiff = getLastCumulativeDiff(); + int startOffset = normalizedCharPos - charDelta; + for (int i = 1; i <= charDelta; i++) { + addOffCorrectMap(startOffset + i, lastDiff - i); + } } + strBuilder.append(reusableCharDecodeBuffer, 0, numChars); bytePos += byteLen; continue; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index eef9902d35e59..20b68b2b6e750 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -62,6 +62,28 @@ public void testCharThatNormalizesToLongText() throws IOException { assertNormalization("ﷺ", parsed, "صلى الله عليه وسلم"); } + public void testOutOfBounds() throws IOException { + @SuppressWarnings("checkstyle:linelength") + String[] inputs = new String[] { + "ﷺ", + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. 
⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. 
ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! 
حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ 
مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + + for (var s : inputs) { + normalise(s, parsed); + } + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), @@ -77,6 +99,21 @@ private void assertNormalization(String input, PrecompiledCharMapNormalizer.Conf } } + private void normalise(String input, PrecompiledCharMapNormalizer.Config config) throws IOException { + PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( + config.offsets(), + config.utf8str(), + new StringReader(input) + ); + char[] output = new char[64]; + int offset = 0; + int size = 64; + int read = normalizer.read(output, offset, size); + while (read > 0) { + read = normalizer.read(output, offset, size); + } + } + static PrecompiledCharMapNormalizer.Config loadTestCharMap() throws IOException { return PrecompiledCharMapNormalizer.fromBase64EncodedResource( "/org/elasticsearch/xpack/ml/inference.nlp.tokenizers/spm_precompiled_normalizer.txt" diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java new file mode 100644 index 0000000000000..b0cce14c59114 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java @@ -0,0 +1,2179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; + +import java.io.IOException; + +/** + * {@link #loadMultiLingualTestVocab()} loads a vocabulary file containing + * a subset of the XLM RoBERTa vocabulary and scores sufficient to tokenize + * the strings in {@link #MULTILINUGAL_TEXTS}. + * + * {@link #EXPECTED_TOKENS} is the tokenization of {@link #MULTILINUGAL_TEXTS} + * using the vocabulary and scores in the test vocabulary returned by + * {@link #loadMultiLingualTestVocab()}. The expected tokens were produced by + * tokenizing {@link #MULTILINUGAL_TEXTS} with the HuggingFace transformers + * XLMRoBERTa tokenizer and mapping those tokens to the position of the same + * tokens in the test vocab. + */ +public class XLMRobertaTestVocab { + + public static Vocabulary loadMultiLingualTestVocab() throws IOException { + try ( + var parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + XLMRobertaTokenizer.class.getResourceAsStream( + "/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json" + ) + ) + ) { + return Vocabulary.PARSER.apply(parser, null); + } + } + + @SuppressWarnings("checkstyle:linelength") + public static String[] MULTILINUGAL_TEXTS = new String[] { + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. 
Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . 
سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار 
اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو 
تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + public static int[][] EXPECTED_TOKENS = new int[][] { + { + 0, + 910, + 256, + 116, + 897, + 65, + 1039, + 830, + 287, + 993, + 660, + 770, + 67, + 619, + 455, + 802, + 73, + 785, + 993, + 990, + 565, + 666, + 194, + 1049, + 110, + 710, + 397, + 283, + 1073, + 666, + 276, + 79, + 486, + 30, + 959, + 912, + 577, + 571, + 658, + 1080, + 327, + 713, + 993, + 457, + 531, + 455, + 553, + 565, + 666, + 46, + 29, + 302, + 993, + 976, + 415, + 155, + 1050, + 956, + 65, + 441, + 65, + 888, + 84, + 511, + 30, + 547, + 908, + 993, + 174, + 350, + 74, + 454, + 500, + 139, + 1026, + 29, + 716, + 337, + 259, + 74, + 874, + 767, + 716, + 961, + 654, + 668, + 460, + 627, + 845, + 577, + 502, + 59, + 30, + 728, + 546, + 140, + 804, + 659, + 67, + 792, + 716, + 358, + 713, + 993, + 783, + 755, + 330, + 278, + 755, + 925, + 74, + 30, + 871, + 993, + 416, + 767, + 1040, + 713, + 331, + 1016, + 460, + 668, + 419, + 568, + 148, + 326, + 306, + 30, + 440, + 36, + 742, + 398, + 727, + 993, + 389, + 795, + 373, + 1009, + 681, + 577, + 455, + 410, + 246, + 1062, + 29, + 641, + 993, + 788, + 921, + 413, + 483, + 329, + 737, + 331, + 1016, + 413, + 1040, + 713, + 482, + 23, + 29, + 253, + 365, + 489, + 457, + 642, + 29, + 544, + 778, + 1077, + 68, + 27, + 379, + 59, + 
30, + 639, + 965, + 48, + 52, + 851, + 773, + 331, + 1012, + 1076, + 481, + 661, + 461, + 331, + 767, + 166, + 1010, + 285, + 716, + 662, + 999, + 461, + 668, + 132, + 767, + 936, + 67, + 533, + 166, + 929, + 1046, + 677, + 456, + 124, + 30, + 440, + 183, + 954, + 730, + 65, + 716, + 358, + 713, + 993, + 455, + 637, + 748, + 40, + 472, + 149, + 527, + 709, + 29, + 490, + 1077, + 74, + 777, + 629, + 823, + 665, + 367, + 457, + 560, + 417, + 497, + 478, + 888, + 889, + 684, + 821, + 65, + 441, + 65, + 605, + 74, + 679, + 840, + 736, + 150, + 666, + 30, + 479, + 527, + 709, + 94, + 510, + 864, + 455, + 1074, + 667, + 453, + 308, + 74, + 390, + 74, + 647, + 733, + 469, + 265, + 67, + 764, + 30, + 15, + 490, + 993, + 29, + 447, + 971, + 123, + 29, + 501, + 65, + 668, + 559, + 461, + 591, + 737, + 29, + 449, + 233, + 1034, + 16, + 121, + 993, + 428, + 528, + 65, + 474, + 455, + 1056, + 275, + 324, + 29, + 718, + 991, + 717, + 473, + 980, + 668, + 390, + 67, + 716, + 711, + 464, + 224, + 1073, + 666, + 29, + 811, + 990, + 29, + 888, + 616, + 191, + 184, + 768, + 709, + 846, + 62, + 994, + 144, + 30, + 142, + 409, + 976, + 415, + 65, + 326, + 888, + 575, + 543, + 384, + 537, + 17, + 1029, + 668, + 343, + 331, + 1012, + 30, + 422, + 44, + 33, + 1036, + 279, + 67, + 1053, + 976, + 415, + 65, + 326, + 101, + 1029, + 54, + 1027, + 272, + 874, + 65, + 331, + 1016, + 2 }, + { + 0, + 433, + 204, + 360, + 870, + 514, + 962, + 449, + 295, + 962, + 624, + 208, + 497, + 995, + 1071, + 65, + 538, + 412, + 760, + 883, + 592, + 422, + 707, + 858, + 1032, + 422, + 44, + 34, + 875, + 72, + 1032, + 716, + 254, + 896, + 600, + 24, + 873, + 514, + 29, + 695, + 425, + 1071, + 65, + 538, + 412, + 760, + 98, + 896, + 600, + 24, + 273, + 30, + 729, + 960, + 188, + 1001, + 596, + 497, + 497, + 485, + 76, + 178, + 579, + 679, + 914, + 950, + 74, + 459, + 883, + 514, + 686, + 21, + 80, + 741, + 745, + 962, + 781, + 70, + 716, + 1003, + 151, + 455, + 596, + 522, + 638, + 310, + 65, + 1066, + 1020, + 
30, + 2 }, + { + 0, + 716, + 725, + 652, + 77, + 9, + 444, + 463, + 20, + 232, + 10, + 270, + 427, + 886, + 444, + 463, + 20, + 588, + 85, + 4, + 470, + 886, + 692, + 444, + 463, + 22, + 28, + 24, + 71, + 232, + 539, + 100, + 975, + 6, + 146, + 886, + 534, + 4, + 362, + 432, + 122, + 100, + 104, + 90, + 51, + 992, + 39, + 359, + 997, + 32, + 317, + 100, + 292, + 424, + 6, + 716, + 725, + 171, + 582, + 96, + 49, + 58, + 516, + 705, + 100, + 320, + 377, + 968, + 701, + 333, + 86, + 47, + 610, + 886, + 33, + 979, + 115, + 232, + 539, + 731, + 586, + 581, + 1063, + 71, + 664, + 1075, + 6, + 2 }, + { + 0, + 548, + 1013, + 948, + 854, + 215, + 716, + 799, + 867, + 865, + 532, + 953, + 499, + 298, + 758, + 853, + 107, + 819, + 498, + 865, + 314, + 657, + 847, + 274, + 60, + 117, + 395, + 190, + 985, + 402, + 578, + 267, + 352, + 231, + 861, + 154, + 943, + 402, + 271, + 525, + 743, + 135, + 774, + 374, + 590, + 352, + 231, + 274, + 1078, + 117, + 107, + 819, + 498, + 400, + 361, + 282, + 738, + 271, + 439, + 1021, + 849, + 1038, + 274, + 243, + 673, + 93, + 117, + 484, + 797, + 117, + 716, + 200, + 127, + 861, + 825, + 219, + 852, + 402, + 271, + 669, + 854, + 215, + 865, + 923, + 107, + 819, + 498, + 394, + 931, + 361, + 716, + 941, + 11, + 861, + 154, + 943, + 60, + 670, + 402, + 669, + 854, + 215, + 716, + 799, + 861, + 385, + 117, + 1013, + 948, + 854, + 215, + 948, + 838, + 948, + 238, + 91, + 948, + 831, + 963, + 832, + 894, + 108, + 853, + 402, + 525, + 899, + 913, + 12, + 703, + 562, + 1038, + 274, + 900, + 798, + 117, + 554, + 688, + 815, + 117, + 958, + 45, + 117, + 535, + 800, + 782, + 958, + 613, + 117, + 926, + 761, + 117, + 926, + 1008, + 117, + 957, + 1004, + 853, + 117, + 958, + 31, + 207, + 859, + 271, + 525, + 198, + 1014, + 618, + 926, + 406, + 525, + 675, + 211, + 809, + 1048, + 152, + 905, + 689, + 716, + 30, + 525, + 905, + 211, + 412, + 615, + 849, + 854, + 215, + 24, + 706, + 271, + 107, + 819, + 498, + 271, + 657, + 847, + 506, + 5, + 569, + 63, + 
363, + 963, + 832, + 271, + 894, + 108, + 853, + 1022, + 1030, + 378, + 635, + 30, + 669, + 854, + 215, + 716, + 799, + 325, + 651, + 355, + 1052, + 229, + 274, + 813, + 899, + 93, + 117, + 525, + 1059, + 860, + 325, + 506, + 353, + 220, + 891, + 119, + 789, + 669, + 854, + 215, + 716, + 799, + 301, + 63, + 848, + 714, + 550, + 749, + 614, + 865, + 754, + 423, + 849, + 865, + 443, + 274, + 562, + 271, + 301, + 63, + 848, + 693, + 550, + 749, + 614, + 865, + 423, + 849, + 159, + 192, + 612, + 274, + 566, + 608, + 562, + 1038, + 904, + 669, + 854, + 215, + 716, + 799, + 982, + 125, + 898, + 847, + 687, + 744, + 562, + 117, + 368, + 849, + 690, + 578, + 400, + 865, + 720, + 262, + 806, + 933, + 789, + 587, + 536, + 789, + 202, + 861, + 266, + 769, + 60, + 63, + 1043, + 865, + 576, + 977, + 601, + 271, + 190, + 353, + 669, + 854, + 215, + 861, + 369, + 280, + 102, + 861, + 82, + 126, + 964, + 852, + 30, + 669, + 854, + 215, + 716, + 799, + 986, + 849, + 747, + 274, + 407, + 234, + 213, + 607, + 933, + 125, + 891, + 849, + 746, + 274, + 548, + 808, + 294, + 839, + 828, + 852, + 187, + 1018, + 771, + 716, + 766, + 747, + 274, + 19, + 78, + 347, + 933, + 271, + 578, + 451, + 849, + 747, + 820, + 509, + 789, + 548, + 203, + 948, + 716, + 739, + 648, + 716, + 799, + 865, + 772, + 861, + 25, + 274, + 227, + 380, + 672, + 102, + 347, + 933, + 117, + 525, + 735, + 214, + 271, + 952, + 1059, + 860, + 972, + 775, + 402, + 578, + 525, + 315, + 861, + 462, + 529, + 789, + 394, + 715, + 274, + 120, + 861, + 716, + 1058, + 418, + 241, + 824, + 1038, + 865, + 318, + 853, + 756, + 669, + 854, + 215, + 716, + 799, + 189, + 436, + 672, + 816, + 687, + 378, + 635, + 716, + 30, + 2 }, + { + 0, + 268, + 951, + 7, + 716, + 903, + 865, + 584, + 168, + 887, + 229, + 653, + 932, + 421, + 217, + 932, + 386, + 653, + 932, + 865, + 716, + 835, + 143, + 612, + 904, + 593, + 363, + 904, + 411, + 203, + 853, + 21, + 421, + 716, + 835, + 185, + 387, + 81, + 209, + 597, + 865, + 296, + 862, + 901, + 
223, + 1005, + 670, + 437, + 1033, + 8, + 412, + 176, + 24, + 704, + 687, + 782, + 289, + 421, + 716, + 835, + 185, + 776, + 853, + 81, + 716, + 835, + 716, + 466, + 26, + 421, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 716, + 835, + 716, + 1031, + 853, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 128, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 345, + 421, + 716, + 1031, + 942, + 421, + 371, + 550, + 408, + 307, + 951, + 412, + 807, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 555, + 219, + 230, + 690, + 491, + 716, + 835, + 904, + 924, + 958, + 625, + 230, + 307, + 951, + 412, + 810, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 396, + 865, + 125, + 1002, + 636, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 125, + 1002, + 636, + 307, + 951, + 412, + 815, + 24, + 206, + 312, + 791, + 932, + 904, + 617, + 307, + 951, + 2 }, + { + 0, + 349, + 948, + 927, + 186, + 861, + 556, + 779, + 763, + 83, + 112, + 180, + 779, + 1000, + 496, + 750, + 784, + 521, + 967, + 263, + 435, + 112, + 180, + 322, + 1069, + 967, + 83, + 322, + 179, + 611, + 714, + 550, + 749, + 614, + 133, + 762, + 321, + 967, + 841, + 780, + 493, + 83, + 263, + 1051, + 356, + 465, + 515, + 555, + 779, + 1081, + 344, + 779, + 1061, + 133, + 939, + 321, + 782, + 915, + 589, + 922, + 83, + 556, + 682, + 763, + 420, + 694, + 555, + 442, + 820, + 644, + 791, + 197, + 342, + 247, + 814, + 1017, + 685, + 782, + 915, + 480, + 35, + 915, + 645, + 133, + 1041, + 552, + 967, + 106, + 623, + 357, + 622, + 83, + 526, + 442, + 245, + 1024, + 197, + 1067, + 932, + 83, + 988, + 162, + 632, + 83, + 556, + 779, + 545, + 698, + 934, + 83, + 814, + 1072, + 2 }, + { + 0, + 335, + 316, + 779, + 467, + 572, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 917, + 43, + 779, + 555, + 83, + 239, + 372, + 133, + 430, + 1023, + 944, + 291, + 1079, + 782, + 893, + 996, + 245, + 719, + 808, + 779, + 628, + 934, + 83, + 557, + 133, + 
309, + 332, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 545, + 780, + 125, + 517, + 157, + 985, + 595, + 236, + 779, + 589, + 945, + 83, + 197, + 615, + 175, + 732, + 884, + 948, + 134, + 257, + 708, + 83, + 716, + 319, + 740, + 937, + 694, + 966, + 951, + 2 }, + { + 0, + 716, + 328, + 391, + 1070, + 934, + 779, + 338, + 399, + 83, + 496, + 782, + 293, + 83, + 989, + 133, + 564, + 348, + 947, + 177, + 779, + 836, + 949, + 496, + 782, + 716, + 328, + 779, + 523, + 383, + 947, + 177, + 255, + 682, + 836, + 934, + 103, + 782, + 316, + 779, + 216, + 162, + 609, + 199, + 83, + 1081, + 753, + 393, + 602, + 420, + 694, + 434, + 494, + 129, + 498, + 862, + 248, + 261, + 35, + 13, + 1057, + 316, + 779, + 475, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 697, + 136, + 163, + 197, + 650, + 942, + 245, + 817, + 180, + 780, + 113, + 906, + 723, + 2 }, + { + 0, + 339, + 722, + 145, + 196, + 740, + 899, + 244, + 92, + 492, + 55, + 299, + 247, + 680, + 714, + 550, + 749, + 614, + 780, + 818, + 182, + 567, + 796, + 520, + 247, + 787, + 205, + 779, + 583, + 391, + 1048, + 682, + 890, + 236, + 899, + 281, + 674, + 1006, + 911, + 160, + 879, + 83, + 321, + 782, + 303, + 716, + 909, + 948, + 250, + 346, + 716, + 790, + 934, + 860, + 236, + 303, + 173, + 948, + 540, + 716, + 790, + 974, + 118, + 863, + 815, + 862, + 368, + 693, + 550, + 749, + 614, + 779, + 340, + 947, + 313, + 724, + 726, + 861, + 351, + 157, + 83, + 928, + 521, + 787, + 594, + 322, + 699, + 429, + 626, + 99, + 780, + 341, + 18, + 827, + 541, + 860, + 958, + 585, + 182, + 693, + 550, + 749, + 614, + 779, + 861, + 947, + 313, + 407, + 862, + 477, + 83, + 130, + 521, + 603, + 83, + 133, + 448, + 947, + 313, + 351, + 157, + 83, + 928, + 880, + 915, + 779, + 475, + 594, + 322, + 201, + 111, + 347, + 193, + 37, + 842, + 75, + 18, + 69, + 322, + 907, + 221, + 162, + 159, + 860, + 807, + 859, + 826, + 323, + 819, + 197, + 161, + 817, + 169, + 197, + 125, + 860, + 848, + 323, + 942, + 300, + 255, + 682, + 716, + 
218, + 934, + 222, + 381, + 694, + 526, + 712, + 782, + 786, + 573, + 382, + 949, + 197, + 573, + 179, + 66, + 782, + 83, + 322, + 786, + 934, + 877, + 264, + 580, + 604, + 1042, + 782, + 228, + 938, + 476, + 66, + 442, + 468, + 41, + 782, + 442, + 235, + 255, + 83, + 1045, + 114, + 492, + 56, + 66, + 442, + 940, + 765, + 133, + 940, + 765, + 899, + 561, + 492, + 56, + 471, + 260, + 643, + 255, + 899, + 973, + 784, + 66, + 197, + 945, + 35, + 1064, + 366, + 66, + 782, + 611, + 304, + 882, + 907, + 442, + 1047, + 702, + 791, + 818, + 1028, + 967, + 503, + 452, + 133, + 872, + 195, + 249, + 899, + 915, + 50, + 95, + 442, + 446, + 895, + 967, + 599, + 164, + 162, + 431, + 779, + 1000, + 626, + 170, + 859, + 803, + 806, + 862, + 847, + 882, + 934, + 859, + 125, + 862, + 847, + 290, + 806, + 611, + 860, + 159, + 860, + 812, + 859, + 848, + 290, + 815, + 855, + 143, + 290, + 824, + 860, + 852, + 866, + 847, + 860, + 611, + 862, + 450, + 105, + 236, + 392, + 247, + 197, + 503, + 938, + 602, + 945, + 805, + 288, + 655, + 806, + 197, + 869, + 935, + 690, + 1007, + 158, + 412, + 885, + 236, + 56, + 24, + 197, + 35, + 815, + 255, + 83, + 258, + 1024, + 1079, + 1048, + 691, + 620, + 934, + 876, + 951, + 197, + 237, + 981, + 920, + 192, + 790, + 974, + 984, + 676, + 862, + 458, + 714, + 550, + 749, + 614, + 671, + 969, + 83, + 351, + 157, + 354, + 682, + 342, + 563, + 526, + 251, + 918, + 321, + 967, + 14, + 14, + 611, + 304, + 882, + 322, + 680, + 714, + 550, + 749, + 614, + 671, + 445, + 83, + 699, + 967, + 14, + 14, + 83, + 125, + 860, + 649, + 57, + 388, + 779, + 881, + 694, + 255, + 791, + 782, + 197, + 57, + 131, + 860, + 88, + 862, + 477, + 97, + 446, + 644, + 791, + 782, + 678, + 862, + 549, + 197, + 162, + 405, + 899, + 125, + 860, + 849, + 682, + 334, + 948, + 700, + 791, + 197, + 138, + 1044, + 401, + 878, + 633, + 899, + 945, + 682, + 53, + 694, + 255, + 370, + 930, + 882, + 899, + 945, + 682, + 137, + 311, + 782, + 1055, + 899, + 945, + 682, + 109, + 782, + 164, + 
162, + 716, + 570, + 899, + 125, + 860, + 849, + 780, + 61, + 117, + 558, + 852, + 574, + 899, + 125, + 860, + 849, + 780, + 634, + 117, + 305, + 899, + 945, + 780, + 542, + 808, + 782, + 438, + 862, + 892, + 899, + 125, + 860, + 849, + 780, + 507, + 782, + 504, + 899, + 125, + 860, + 849, + 682, + 548, + 197, + 316, + 899, + 125, + 860, + 849, + 780, + 249, + 948, + 700, + 370, + 505, + 125, + 860, + 849, + 682, + 2 }, + { + 0, + 212, + 495, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 915, + 716, + 558, + 162, + 595, + 848, + 322, + 342, + 243, + 721, + 934, + 1019, + 153, + 967, + 112, + 180, + 716, + 739, + 182, + 780, + 640, + 779, + 361, + 680, + 779, + 1000, + 518, + 197, + 848, + 426, + 135, + 987, + 284, + 414, + 694, + 1037, + 983, + 862, + 316, + 133, + 752, + 1079, + 656, + 1015, + 714, + 550, + 749, + 614, + 322, + 916, + 794, + 934, + 159, + 512, + 808, + 716, + 739, + 182, + 779, + 753, + 862, + 970, + 92, + 83, + 133, + 998, + 947, + 177, + 97, + 446, + 702, + 782, + 829, + 978, + 557, + 779, + 475, + 779, + 277, + 682, + 487, + 240, + 87, + 937, + 955, + 837, + 83, + 321, + 782, + 513, + 112, + 180, + 915, + 336, + 83, + 915, + 252, + 133, + 734, + 521, + 967, + 316, + 779, + 475, + 945, + 133, + 181, + 946, + 316, + 779, + 226, + 141, + 934, + 172, + 209, + 822, + 169, + 1035, + 1068, + 117, + 761, + 669, + 364, + 833, + 824, + 42, + 286, + 508, + 521, + 782, + 690, + 663, + 156, + 488, + 251, + 1065, + 915, + 89, + 951, + 2 }, }; + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java index bff2c6a94d789..3fd51601e0138 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java @@ -124,6 +124,30 @@ public void testMultiByteEmoji() throws IOException { } } + public void testMultilingual() throws IOException { + var vocab = XLMRobertaTestVocab.loadMultiLingualTestVocab(); + + try ( + XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( + vocab.get(), + vocab.scores(), + new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) + ).setWithSpecialTokens(true).build() + ) { + for (int i = 0; i < XLMRobertaTestVocab.MULTILINUGAL_TEXTS.length; i++) { + logger.info(i); + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + XLMRobertaTestVocab.MULTILINUGAL_TEXTS[i], + Tokenization.Truncate.FIRST, + -1, + 0, + null + ).get(0); + assertArrayEquals(XLMRobertaTestVocab.EXPECTED_TOKENS[i], tokenization.tokenIds()); + } + } + } + public void testTokenizeWithNeverSplit() throws IOException { try ( XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( diff --git a/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json new file mode 100644 index 0000000000000..99f7d2ae34a04 --- /dev/null +++ b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json @@ -0,0 +1,2172 @@ +{ + "model_id": "anything_to_satisfy_the_parser", + "vocabulary": [ + "", + "", + "", + "", + "、", + "▁نسبت", + "。", + "▁ھن", + "▁ھو", + "「", + "」", + "فض", + "▁سار", + "▁پہنچے", + "’", + "▁Nakon", + "nova", + "jena", + "!", + "▁حديث", + "工業", + "'", + "(", + "▁soba", + ")", + "▁شوق", + "في", + "▁pod", + "株", + ",", + ".", + "قر", + "進", + "3", + "8", + "▁پر", + "▁pot", + ":", + "▁مختار", + "格", + "▁pos", + "▁مزاج", + "قل", + "قم", + "⁄", + "فسر", + "ću", + "く", + "▁bila", + "あげ", + "▁طرح", + "し", + 
"▁vrhunski", + "▁شعار", + "▁jednu", + "▁والی", + "▁والے", + "▁ہر", + "た", + "a", + "▁ٿي", + "▁طبیعت", + "b", + "▁ڪري", + "▁حدیث", + "e", + "▁ہو", + "g", + "h", + "▁ہم", + "k", + "に", + "l", + "ština", + "m", + "▁نبی", + "n", + "の", + "▁لکڻ", + "s", + "t", + "▁سان", + "▁سبب", + "▁میں", + "▁lit", + "へ", + "べ", + "لف", + "لق", + "لم", + "や", + "ثير", + "مت", + "لي", + "▁pri", + "▁راست", + "り", + "▁عطا", + "▁66", + "خدا", + "を", + "▁kuća", + "▁ڪرڻ", + "▁تھا", + "増", + "▁پکار", + "▁طا", + "▁طب", + "مل", + "▁راز", + "▁prenosi", + "نا", + "▁سہ", + "▁طرف", + "▁بلند", + "英文", + "vne", + "▁،", + "▁سی", + "ائين", + "▁زمين", + "▁zbog", + "点", + "▁stoljeća", + "jeti", + "▁ا", + "▁ب", + "ني", + "▁امير", + "▁آ", + "▁آئے", + "▁خ", + "vod", + "▁سے", + "▁ت", + "▁ج", + "▁ص", + "▁ض", + "▁ط", + "čenja", + "buku", + "▁ظ", + "▁90", + "▁ر", + "vom", + "▁ز", + "▁1990", + "▁ش", + "vor", + "ena", + "ologi", + "▁yn", + "الت", + "یا", + "▁ع", + "ene", + "الا", + "یت", + "وا", + "▁م", + "یح", + "▁ن", + "▁و", + "ید", + "▁صدق", + "▁عن", + "▁za", + "▁ف", + "▁ق", + "یر", + "▁ل", + "者が", + "ور", + "▁عبداللہ", + "▁obra", + "وع", + "الف", + "نٹ", + "ô", + "▁رسول", + "یل", + "▁کوئی", + "ین", + "▁zi", + "uncu", + "▁ٻي", + "لہ", + "▁انجام", + "▁fan", + "▁پنهنجو", + "▁اهو", + "▁vrh", + "بار", + "▁کیا", + "ć", + "▁مستقیم", + "لی", + "▁اور", + "▁اهي", + "لے", + "باط", + "▁پڑھ", + "▁اوهان", + "احمد", + "isto", + "▁ہاتھوں", + "▁پنھنجي", + "يء", + "▁lân", + "▁پ", + "مہ", + "يت", + "▁بال", + "يد", + "▁خرچ", + "ير", + "▁جذب", + "▁لا", + "نگ", + "وار", + "▁ڏ", + "▁شاہد", + "بان", + "▁قرار", + "vanja", + "يف", + "▁خلاف", + "▁بغداد", + "نہ", + "اني", + "ين", + "يه", + "社", + "▁njihova", + "▁مج", + "▁بازار", + "نے", + "▁ک", + "▁ڪ", + "▁تشریف", + "▁مؤ", + "▁ملي", + "ایت", + "▁مع", + "▁ملا", + "▁ہ", + "▁plod", + "یں", + "▁نب", + "▁ملت", + "▁ی", + "▁من", + "▁حیثیت", + "▁ali", + "660", + "وں", + "đe", + "یہ", + "▁لپ", + "đi", + "▁خطا", + "وی", + "▁پڇ", + "▁ایک", + "▁نام", + "šnje", + "▁پٽ", + "▁سلطنت", + "▁۱", + 
"▁کثیر", + "から", + "▁۽", + "▁dvije", + "▁breed", + "▁۾", + "▁200", + "▁Ana", + "▁مسلمانوں", + "oče", + "▁ukupno", + "▁حاصل", + "▁ترجیح", + "▁پڻ", + "ama", + "انہ", + "tru", + "انی", + "▁dokaz", + "▁آنکھیں", + "▁لکي", + "َّ", + "سلام", + "始", + "▁عرب", + "▁جهڙو", + "cre", + "▁لاء", + "نام", + "▁224", + "▁عورت", + "ُّ", + "▁نبي", + "▁kada", + "▁حضرت", + "▁تعالی", + "▁عدل", + "▁metara", + "▁چئي", + "▁moguće", + "▁مدینہ", + "plo", + "میر", + "▁عبادت", + "صاف", + "▁شهر", + "▁والد", + "▁اسلام", + "出", + "▁آمدن", + "موا", + "世界", + "▁ہے", + "▁نے", + "ِّ", + "▁godina", + "▁هڪ", + "nih", + "▁ostvari", + "ضرار", + "jal", + "▁razm", + "▁prostor", + "▁گئے", + "させる", + "▁لباس", + "▁دعوت", + "▁زمرہ", + "tvr", + "▁اصحاب", + "▁(2)", + "▁وہ", + "▁قسم", + "▁یہ", + "▁deset", + "▁عمر", + "▁المسلمين", + "سار", + "▁شروع", + "▁دولت", + "▁سعد", + "đeni", + "▁تور", + "▁عباس", + "▁خواب", + "▁شریف", + "ودي", + "ڑی", + "▁عمو", + "stavlja", + "的な", + "ary", + "▁وقت", + "海外", + "▁پاڻ", + "▁حجر", + "▁uko", + "▁فرماتے", + "▁pomaže", + "▁عالم", + "▁علم", + "▁گا۔", + "▁رضي", + "▁لانے", + "tili", + "▁عهد", + "▁مخلوق", + "▁ہے۔", + "企業", + "ندا", + "rum", + "▁ڏانهن", + "▁بنا", + "▁بند", + "▁ہزار", + "▁ispod", + "▁نالو", + "وري", + "▁نالي", + "▁خوبی", + "▁sklad", + "▁konačno", + "▁اپنے", + "▁لگ", + "▁ود", + "▁ور", + "▁هي", + "▁خاندان", + "▁generacij", + "lji", + "▁ثروت", + "▁هن", + "▁وا", + "▁هو", + "▁معرفت", + "حضور", + "قار", + "قاب", + "▁قرآن", + "▁عنه", + "▁%", + "▁druge", + "▁'", + "▁(", + "▁-", + "▁پہنچ", + "građ", + "▁materijal", + "nom", + "ڙو", + "▁6", + "▁چھوڑ", + "▁يا", + "▁1", + "▁هٿ", + "動", + "▁4", + "قام", + "1959", + "šne", + "▁:", + "▁پہلے", + "▁یقین", + "拠", + "▁H", + "▁خالی", + "▁مرتبہ", + "▁گذر", + "▁آھي", + "▁اظہار", + "▁مذهبي", + "▁U", + "▁građevin", + "▁نہ", + "▁وچ", + "太平洋", + "▁صفت", + "▁فرما", + "▁tijekom", + "▁بعض", + "▁a", + "‘‘", + "▁سوره", + "▁برکت", + "▁o", + "▁p", + "▁i", + "▁l", + "▁u", + "▁رسالت", + "jit", + "▁veličine", + "▁s", + "▁انتقال", + "化学", + "ša", + "▁صرف", + 
"خلي", + "▁آغاز", + "▁سخت", + "▁onda", + "1987", + "▁بلکہ", + "ječ", + "▁puno", + "▁traje", + "▁بعد", + "خلق", + "▁کریم", + "iranju", + "▁Ta", + "▁عہد", + "log", + "▁ispred", + "▁ceremoni", + "▁خوا", + "▁liker", + "azi", + "▁شمار", + "▁سلام", + "pane", + "▁što", + "▁علاوه", + "▁کرنے", + "▁راہ", + "▁ہاتھ", + "آخر", + "▁تھے", + "▁dat", + "ستان", + "عادت", + "ješ", + "▁šum", + "▁kruh", + "▁تمہاری", + "▁ہدایت", + "▁احمد", + "▁ڏينهن", + "▁شریعت", + "▁لکھتے", + "▁ٿيا", + "dono", + "icama", + "رحم", + "▁لیکن", + "▁wie", + "▁کرنا", + "化粧品", + "جنب", + "▁ہوگئے", + "▁علاوہ", + "▁پات", + "▁ہیں", + "▁dei", + "▁پاس", + "▁حال", + "▁سندس", + "▁بھی", + "▁stab", + "▁klim", + "▁ٿيو", + "▁محبت", + "▁12.", + "▁ولادت", + "▁zraka", + "以降", + "▁المو", + "▁چيو", + "▁sti", + "den", + "名", + "▁عمرو", + "▁سید", + "▁سیر", + "bina", + "▁nešto", + "▁غربت", + "▁ž", + "▁Zi", + "▁امام", + "▁قلب", + "▁الله", + "▁قلت", + "▁رہے", + "▁13.", + "▁المج", + "▁گھر", + "▁شام", + "▁مکہ", + "عفو", + "▁kraja", + "▁preciz", + "▁دفع", + "▁آهن", + "▁مضمون", + "▁بڑی", + "▁stol", + "▁پٿر", + "▁انور", + "▁čet", + "▁هجڻ", + "وفا", + "▁svojih", + "▁یعنی", + "▁میرے", + "▁کرم", + "seo", + "▁خدمت", + "ce", + "▁جڏهن", + "ch", + "▁متو", + "ACI", + "つく", + "▁بجائے", + "▁مطابق", + "رسل", + "REP", + "▁انهن", + "株式会社", + "▁لیے", + "▁خلافت", + "ruše", + "wol", + "▁جيڪو", + "▁انہوں", + "کر", + "en", + "ڪاري", + "▁مختلف", + "▁لوگ", + "et", + "▁ڏيندو", + "ولت", + "▁انہیں", + "کل", + "▁metodo", + "▁منزل", + "▁حفظ", + "يون", + "ولو", + "2002", + "▁اللہ", + "ڪن", + "حدث", + "▁وسلم", + "اب", + "▁svojem", + "▁مسجد", + "ئي", + "▁znanja", + "اد", + "▁پیدا", + "اس", + "عون", + "▁stik", + "ومن", + "▁’’", + "▁št", + "▁زمان", + "he", + "▁قبائل", + "ال", + "با", + "ان", + "▁عادت", + "▁هئا", + "▁بيت", + "▁lokal", + "▁be", + "▁Kiva", + "▁واپسی", + "▁dok", + "▁zemlju", + "▁کار", + "ؤں", + "▁سختی", + "▁طیب", + "▁slo", + "بل", + "نہیں", + "بن", + "▁آس", + "当初", + "▁گرو", + "im", + "▁بین", + "▁آن", + "▁آم", + "▁dos", + "▁ci", + "▁akumul", + 
"▁oblika", + "zimi", + "▁محمود", + "改", + "zima", + "je", + "ji", + "▁do", + "▁ابن", + "▁ويو", + "کی", + "▁سفر", + "تز", + "▁دیتی", + "▁بزرگ", + "کار", + "ju", + "▁اطمینان", + "▁de", + "▁آپ", + "tari", + "▁کا", + "لفت", + "ka", + "گا", + "▁dy", + "ائي", + "ته", + "▁ٿيون", + "▁کان", + "▁کش", + "には", + "صلى", + "▁کر", + "▁en", + "ثر", + "▁بنی", + "▁کد", + "▁فرمایا", + "اؤں", + "成長", + "ئے", + "▁لقب", + "▁ڳالھ", + "会社", + "▁هئي", + "▁fur", + "اں", + "la", + "le", + "uru", + "جا", + "li", + "▁صلى", + "ثي", + "▁", + "lo", + "▁nije", + "جر", + "▁تعبير", + "جز", + "▁اگر", + "ا۔", + "▁دریافت", + "創業", + "▁کئے", + "ma", + "▁Kao", + "▁Men", + "▁konstrukcij", + "AMO", + "بی", + "mu", + "▁ممتاز", + "▁سڄو", + "kron", + "ne", + "▁سياست", + "حن", + "خا", + "▁men", + "krov", + "▁سڄي", + "▁رهيا", + "▁mei", + "▁سالم", + "▁سالن", + "no", + "▁عليه", + "▁بیان", + "▁گمراہ", + "▁سرفراز", + "▁مال", + "▁ٻنهي", + "nu", + "▁مان", + "▁رفع", + "▁هجر", + "▁اختلاف", + "of", + "حافظ", + "▁سنا", + "▁جہاد", + "▁društva", + "ائی", + "نون", + "om", + "▁osjeća", + "▁وڏو", + "irano", + "ئين", + "▁زيارت", + "irana", + "مار", + "دو", + "▁طريق", + "▁ar", + "▁poput", + "▁کے", + "▁کی", + "▁jo", + "،", + "▁gli", + "▁کرتے", + "▁koje", + "▁تم", + "▁تو", + "▁ki", + "▁ته", + "ؓ", + "▁گا", + "lā", + "جہ", + "را", + "iš", + "▁دیکھ", + "رج", + "مام", + "رح", + "رخ", + "▁تع", + "▁vje", + "آ", + "▁između", + "دھی", + "ا", + "ب", + "ت", + "▁تي", + "ج", + "▁koja", + "ح", + "▁اک", + "▁جان", + "د", + "▁فرم", + "ذ", + "▁جب", + "ر", + "▁جا", + "▁pojedin", + "ز", + "olo", + "س", + "▁گه", + "ش", + "▁حضور", + "ض", + "▁گو", + "▁Verde", + "▁غالب", + "ري", + "ع", + "▁گم", + "ڪنھن", + "▁گل", + "لوب", + "▁يزيد", + "▁فري", + "ndro", + "▁خدا", + "▁اے", + "ف", + "ق", + "ru", + "▁nepo", + "ل", + "م", + "ن", + "ه", + "▁dizajn", + "و", + "ي", + "▁جر", + "ٌ", + "▁جائے", + "ٍ", + "long", + "َ", + "ُ", + "▁جو", + "ِ", + "ّ", + "se", + "▁جي", + "ْ", + "▁جن", + "ٖ", + "▁بہ", + "sk", + "▁Tim", + "▁غیر", + "▁lang", + "▁kamen", + "▁my", + 
"▁ہوجائیں", + "ارا", + "▁حس", + "▁بخاری", + "▁ہیں۔", + "▁قابل", + "ٰ", + "te", + "دہ", + "سن", + "年", + "ادي", + "▁na", + "stan", + "ٹ", + "ٺ", + "▁حق", + "پ", + "▁الآ", + "دے", + "▁fo", + "▁struktur", + "ڇ", + "▁کو", + "▁الا", + "▁ممنوع", + "ڍ", + "شق", + "▁کي", + "▁ڪن", + "▁بھیج", + "▁تمہیں", + "dali", + "عطاء", + "▁Gra", + "صح", + "un", + "ڙ", + "▁gr", + "▁اس", + "▁از", + "▁ار", + "قول", + "ک", + "▁اح", + "ve", + "▁اب", + "▁ٻيهر", + "▁ام", + "▁vodo", + "▁ال", + "▁فضا", + "▁مذکور", + "▁klimat", + "▁تقوی", + "ڻ", + "ھ", + "▁ڪيائين", + "ہ", + "رے", + "▁hladno", + "ۃ", + "▁بد", + "▁بہتر", + "▁بر", + "روا", + "ی", + "روج", + "▁با", + "▁ان", + "▁بات", + "▁او", + "▁بن", + "ے", + "ûn", + "۔", + "▁بار", + "▁باس", + "dane", + "▁الق", + "▁najpoznatij", + "▁الل", + "▁الم", + "▁Vrh", + "▁is", + "oblikovan", + "▁in", + "▁الطب", + "ڻي", + "▁je", + "ادی", + "▁کہ", + "へと", + "▁صفات", + "▁غنی", + "▁nekoliko", + "▁کڻ", + "▁معاف", + "سے", + "変更", + "▁sa", + "▁سرانجام", + "▁فتح", + "月", + "▁vremena", + "عب", + "▁کڻي", + "عت", + "▁سر", + "▁دور", + "▁ست", + "عر", + "▁اسی", + "▁سب", + "▁se", + "▁protek", + "本", + "▁su", + "jedi", + "▁40", + "ھر", + "国際", + "▁سو", + "▁te", + "▁ساتھ", + "▁tink", + "ھل", + "oksen", + "غو", + "▁ڏنو", + "ں۔", + "▁شن", + "علامه", + "▁ži", + "▁va", + "حمد", + "ija", + "▁محمد", + "▁سڀ", + "حضرت", + "ije", + "▁دون", + "▁ڏنا", + "▁دکھا", + "▁koe", + "▁حلق", + "▁سڏ", + "▁مشرف", + "ٹے", + "▁خواہش", + "kom", + "▁ili", + "▁تک", + "▁ima", + "رائي", + "صحاب", + "▁of", + "▁تہ", + "▁ob", + "▁نہیں", + "▁od", + "▁خل", + "▁جنهن", + "▁Mesa", + "▁gradi", + "▁قائم", + "▁رکھا", + "▁دين", + "اعات", + "▁آواز", + "izaci", + "▁اٹھا", + "▁دل", + "ima", + "▁danas", + "▁گھ", + "▁گهر", + "▁broja", + "▁رکھنے", + "▁حکمت", + "▁po", + "▁قبول", + "ٽڪ", + "▁پيء", + "▁مجتمع", + "▁اعمال", + "ine", + "FIC", + "▁احسان", + "▁حین", + "gje", + "▁18", + "▁ہوئی", + "▁کہا", + "▁قبیل", + "▁ro", + "▁دی۔", + "▁civilizaci", + "▁teori", + "めた", + "▁okrug", + "▁današnji", + "▁گذري", + "▁ہوئے", + "ega", + 
"▁تمام" + ], + "scores": [ + 0.0, + 0.0, + 0.0, + 0.0, + -6.610896110534668, + -11.903949737548828, + -6.411019802093506, + -13.111821174621582, + -12.475632667541504, + -8.94989013671875, + -8.913808822631836, + -12.612136840820312, + -13.197681427001953, + -14.200822830200195, + -6.379403591156006, + -12.10725212097168, + -11.451247215270996, + -12.569819450378418, + -6.61658239364624, + -12.716913223266602, + -12.647109031677246, + -6.345553398132324, + -7.722129821777344, + -13.328119277954102, + -5.9974517822265625, + -13.542387008666992, + -11.525911331176758, + -9.303495407104492, + -13.15868091583252, + -3.4635426998138428, + -3.625642776489258, + -12.082132339477539, + -11.200728416442871, + -8.533885955810547, + -9.478791236877441, + -8.830430030822754, + -9.85542106628418, + -5.629745960235596, + -10000.0, + -11.07493782043457, + -10.675272941589355, + -14.053406715393066, + -12.350106239318848, + -13.201828002929688, + -15.362364768981934, + -14.316963195800781, + -11.72597599029541, + -10.451481819152832, + -10.200002670288086, + -12.970951080322266, + -10.799960136413574, + -9.750066757202148, + -14.09317684173584, + -13.132211685180664, + -12.164549827575684, + -11.790772438049316, + -11.185127258300781, + -11.52169418334961, + -10.283937454223633, + -5.5477118492126465, + -10.650121688842773, + -13.610538482666016, + -8.289443016052246, + -10.517338752746582, + -10000.0, + -5.701941967010498, + -10.288777351379395, + -7.932966709136963, + -7.701241970062256, + -10.9612398147583, + -7.4715776443481445, + -8.438796997070312, + -7.762022495269775, + -13.653663635253906, + -6.647110939025879, + -13.096002578735352, + -6.093497276306152, + -7.835560321807861, + -13.766554832458496, + -5.072621822357178, + -6.071900844573975, + -10.290907859802246, + -11.627830505371094, + -8.404854774475098, + -11.820650100708008, + -11.6625394821167, + -12.946660041809082, + -11.946572303771973, + -13.20298957824707, + -12.11659049987793, + -9.643321990966797, + 
-12.949349403381348, + -11.78995132446289, + -10.989119529724121, + -8.787092208862305, + -12.648849487304688, + -10.379737854003906, + -13.063958168029785, + -11.535991668701172, + -13.839150428771973, + -8.22523021697998, + -13.274272918701172, + -11.137674331665039, + -10.805622100830078, + -12.990604400634766, + -14.285995483398438, + -13.078483581542969, + -12.852004051208496, + -11.508638381958008, + -12.764389991760254, + -13.693453788757324, + -9.525500297546387, + -13.369109153747559, + -10.901957511901855, + -12.365242004394531, + -12.302881240844727, + -12.062744140625, + -9.150372505187988, + -10.726777076721191, + -12.626052856445312, + -12.744816780090332, + -11.537252426147461, + -10.271102905273438, + -13.577858924865723, + -12.193032264709473, + -9.33310604095459, + -9.089756965637207, + -10.834887504577637, + -13.551883697509766, + -10.807448387145996, + -12.546935081481934, + -10.950186729431152, + -11.474028587341309, + -8.803434371948242, + -9.171648025512695, + -10.806365966796875, + -10.984315872192383, + -12.26717758178711, + -11.871655464172363, + -13.023716926574707, + -13.473764419555664, + -13.253439903259277, + -10.311766624450684, + -10.744394302368164, + -12.47635269165039, + -11.38111400604248, + -11.568384170532227, + -10000.0, + -10.497817039489746, + -10.765369415283203, + -11.620940208435059, + -8.651301383972168, + -12.216012001037598, + -11.396681785583496, + -10.353537559509277, + -9.594635963439941, + -12.703508377075195, + -10.891910552978516, + -9.981459617614746, + -9.07016372680664, + -13.325227737426758, + -9.9458646774292, + -7.174049377441406, + -10.452103614807129, + -13.543808937072754, + -10000.0, + -7.37307596206665, + -10000.0, + -10.899341583251953, + -11.181215286254883, + -9.23928451538086, + -13.12946605682373, + -10.758359909057617, + -14.190896987915039, + -10.881155967712402, + -11.799145698547363, + -13.552739143371582, + -12.35738754272461, + -10.790441513061523, + -11.642875671386719, + 
-11.203944206237793, + -11.164298057556152, + -10.391376495361328, + -10.602131843566895, + -13.296408653259277, + -12.801287651062012, + -13.29976749420166, + -11.127630233764648, + -9.635873794555664, + -13.025283813476562, + -11.522773742675781, + -12.629497528076172, + -11.675955772399902, + -10.100441932678223, + -9.490818977355957, + -12.654541015625, + -11.304871559143066, + -8.778549194335938, + -12.341999053955078, + -12.693137168884277, + -12.534963607788086, + -12.560155868530273, + -12.807035446166992, + -14.408479690551758, + -10.91116714477539, + -13.606574058532715, + -13.693682670593262, + -11.006491661071777, + -13.511610984802246, + -10.69263744354248, + -10000.0, + -10.88202953338623, + -9.945484161376953, + -11.082690238952637, + -13.169434547424316, + -10.761433601379395, + -12.539514541625977, + -9.714284896850586, + -10.531815528869629, + -11.411252975463867, + -12.159621238708496, + -13.439103126525879, + -12.159096717834473, + -10.569905281066895, + -11.485320091247559, + -11.685418128967285, + -11.131010055541992, + -13.32590389251709, + -12.843395233154297, + -11.425615310668945, + -9.176533699035645, + -10.86955738067627, + -11.128808975219727, + -13.251603126525879, + -12.20699405670166, + -11.551314353942871, + -10.626527786254883, + -11.38455581665039, + -11.614538192749023, + -14.187246322631836, + -12.982544898986816, + -11.797250747680664, + -10000.0, + -9.858101844787598, + -12.285886764526367, + -12.553010940551758, + -13.370101928710938, + -10.696676254272461, + -12.74817180633545, + -12.134454727172852, + -11.036406517028809, + -8.165318489074707, + -13.548136711120605, + -9.375162124633789, + -13.292466163635254, + -9.353793144226074, + -11.82857894897461, + -11.406195640563965, + -13.611187934875488, + -12.325207710266113, + -13.719786643981934, + -11.11467170715332, + -13.631454467773438, + -9.855673789978027, + -10.353020668029785, + -13.05349349975586, + -13.028356552124023, + -13.965872764587402, + -12.046480178833008, + 
-10000.0, + -9.703826904296875, + -9.335156440734863, + -12.247420310974121, + -13.311925888061523, + -9.245621681213379, + -9.983458518981934, + -10.990195274353027, + -12.795949935913086, + -13.135777473449707, + -13.378888130187988, + -11.403210639953613, + -13.6084566116333, + -12.680025100708008, + -10.440314292907715, + -12.222440719604492, + -11.42122745513916, + -11.383726119995117, + -12.9137601852417, + -14.476696968078613, + -13.467201232910156, + -12.135478973388672, + -12.71440601348877, + -12.663864135742188, + -11.900956153869629, + -13.902737617492676, + -11.216065406799316, + -10.742656707763672, + -10000.0, + -13.595476150512695, + -12.485321044921875, + -13.868348121643066, + -13.327157020568848, + -11.195040702819824, + -11.418901443481445, + -12.40562915802002, + -13.329121589660645, + -13.79185962677002, + -13.747245788574219, + -12.834506034851074, + -14.360300064086914, + -11.659099578857422, + -12.826003074645996, + -13.300061225891113, + -13.438057899475098, + -10.775205612182617, + -11.984052658081055, + -11.038277626037598, + -9.496801376342773, + -13.454340934753418, + -12.906173706054688, + -10.528743743896484, + -9.034796714782715, + -9.208064079284668, + -13.150303840637207, + -11.386055946350098, + -10.825066566467285, + -9.702229499816895, + -13.07643985748291, + -13.973017692565918, + -10.952235221862793, + -13.427935600280762, + -11.593998908996582, + -11.631296157836914, + -12.894272804260254, + -12.372183799743652, + -12.643047332763672, + -13.63158893585205, + -12.459877967834473, + -14.055868148803711, + -9.961138725280762, + -10.343199729919434, + -11.893840789794922, + -10.064606666564941, + -12.285531997680664, + -11.363245010375977, + -13.014981269836426, + -12.707101821899414, + -11.332316398620605, + -11.302778244018555, + -13.055937767028809, + -13.674442291259766, + -11.654837608337402, + -12.616765975952148, + -12.207862854003906, + -11.831457138061523, + -13.68111801147461, + -12.976818084716797, + 
-14.430807113647461, + -12.421124458312988, + -11.50658893585205, + -11.110703468322754, + -10.588051795959473, + -11.868654251098633, + -12.110957145690918, + -13.921645164489746, + -13.405242919921875, + -13.995997428894043, + -14.098799705505371, + -12.004497528076172, + -11.506338119506836, + -12.163915634155273, + -13.178263664245605, + -14.001510620117188, + -12.172317504882812, + -13.293499946594238, + -10000.0, + -10000.0, + -11.053098678588867, + -11.700507164001465, + -9.89022159576416, + -13.137804985046387, + -11.584470748901367, + -11.47780704498291, + -12.676255226135254, + -13.47961711883545, + -13.04322338104248, + -12.198275566101074, + -13.25001335144043, + -12.475261688232422, + -12.113178253173828, + -13.952543258666992, + -10.791595458984375, + -12.600780487060547, + -12.942652702331543, + -11.553879737854004, + -10.698843002319336, + -12.703505516052246, + -13.684463500976562, + -12.352499961853027, + -13.843332290649414, + -10.838998794555664, + -10.505772590637207, + -9.910398483276367, + -10000.0, + -13.492959976196289, + -12.739365577697754, + -12.663825035095215, + -11.77187728881836, + -12.291872024536133, + -9.96491813659668, + -11.795421600341797, + -8.20328426361084, + -5.926211357116699, + -6.1764984130859375, + -12.557028770446777, + -13.187960624694824, + -13.275311470031738, + -9.935545921325684, + -12.842405319213867, + -8.376090049743652, + -12.797646522521973, + -10.724736213684082, + -7.551031589508057, + -12.609972953796387, + -10.906079292297363, + -8.062232971191406, + -12.41627025604248, + -14.217342376708984, + -13.32412338256836, + -7.829031944274902, + -11.670573234558105, + -13.152725219726562, + -13.748356819152832, + -8.923832893371582, + -13.297384262084961, + -13.406578063964844, + -12.794132232666016, + -12.182308197021484, + -12.669015884399414, + -13.825421333312988, + -8.497254371643066, + -13.710942268371582, + -10.821403503417969, + -12.96695327758789, + -13.50233268737793, + -14.252979278564453, + 
-12.690325736999512, + -12.644253730773926, + -11.217681884765625, + -5.530364990234375, + -11.728997230529785, + -14.01970100402832, + -14.183956146240234, + -6.582267761230469, + -9.365259170532227, + -6.0461626052856445, + -7.482025623321533, + -7.319528579711914, + -13.754130363464355, + -12.329744338989258, + -14.684525489807129, + -7.452380657196045, + -12.367213249206543, + -13.013487815856934, + -10.484649658203125, + -11.331403732299805, + -13.430648803710938, + -11.846324920654297, + -11.766498565673828, + -11.94919490814209, + -13.64667797088623, + -12.330714225769043, + -12.643916130065918, + -12.118013381958008, + -12.920206069946289, + -9.852779388427734, + -13.62667465209961, + -12.794713020324707, + -13.23983097076416, + -9.644025802612305, + -13.41153335571289, + -10.862459182739258, + -13.595255851745605, + -12.811727523803711, + -12.114456176757812, + -12.617325782775879, + -12.534378051757812, + -12.435154914855957, + -11.79420280456543, + -13.141073226928711, + -10.228925704956055, + -12.648173332214355, + -10.5259370803833, + -13.075540542602539, + -12.833207130432129, + -12.930810928344727, + -11.625775337219238, + -8.988334655761719, + -11.492377281188965, + -14.40893268585205, + -12.106353759765625, + -13.25969409942627, + -13.190732955932617, + -14.228679656982422, + -13.389674186706543, + -11.702837944030762, + -12.21057415008545, + -13.743621826171875, + -14.52221393585205, + -13.440570831298828, + -12.3108491897583, + -13.243945121765137, + -13.412277221679688, + -11.340847969055176, + -9.651451110839844, + -11.63448429107666, + -14.10894775390625, + -13.651695251464844, + -13.614228248596191, + -12.83806037902832, + -13.661396026611328, + -9.87491512298584, + -9.8951416015625, + -11.809096336364746, + -10000.0, + -11.665786743164062, + -9.869390487670898, + -13.02005672454834, + -13.772218704223633, + -12.269754409790039, + -10000.0, + -10.893101692199707, + -14.356070518493652, + -13.704068183898926, + -13.16357707977295, + 
-13.131183624267578, + -11.631052017211914, + -11.685710906982422, + -8.907776832580566, + -9.99026870727539, + -14.045997619628906, + -12.187337875366211, + -13.198140144348145, + -11.964822769165039, + -12.250166893005371, + -14.266410827636719, + -11.802629470825195, + -11.381916046142578, + -11.162945747375488, + -12.44157886505127, + -9.786359786987305, + -10000.0, + -11.233309745788574, + -11.009201049804688, + -12.883721351623535, + -11.877254486083984, + -12.20182991027832, + -14.277528762817383, + -13.7249755859375, + -12.742781639099121, + -12.661029815673828, + -12.547115325927734, + -10.75460147857666, + -12.907571792602539, + -12.363632202148438, + -11.848713874816895, + -14.31727123260498, + -14.046844482421875, + -12.718457221984863, + -13.913930892944336, + -13.322484016418457, + -12.36288833618164, + -11.979717254638672, + -12.366744041442871, + -13.646872520446777, + -13.255087852478027, + -12.110769271850586, + -8.450657844543457, + -12.003588676452637, + -8.075675010681152, + -13.522724151611328, + -13.485895156860352, + -13.05333423614502, + -13.399734497070312, + -11.368695259094238, + -13.210000038146973, + -13.97340202331543, + -11.725092887878418, + -13.334667205810547, + -10.739959716796875, + -13.98511791229248, + -13.574196815490723, + -12.201776504516602, + -12.244017601013184, + -11.784577369689941, + -11.283102035522461, + -6.465692520141602, + -13.550567626953125, + -10000.0, + -11.969606399536133, + -7.697822093963623, + -14.338610649108887, + -13.137377738952637, + -12.166990280151367, + -11.836442947387695, + -13.03665542602539, + -10000.0, + -12.438132286071777, + -11.183541297912598, + -12.041357040405273, + -12.614006996154785, + -11.056611061096191, + -12.743069648742676, + -12.888111114501953, + -11.329586029052734, + -10.89394760131836, + -13.152234077453613, + -12.330127716064453, + -11.13021469116211, + -12.93645191192627, + -11.057968139648438, + -10000.0, + -10.721052169799805, + -13.35482406616211, + 
-12.192888259887695, + -13.680505752563477, + -11.670418739318848, + -11.871618270874023, + -11.242685317993164, + -9.557723999023438, + -10000.0, + -10000.0, + -11.125853538513184, + -9.22323226928711, + -13.24915599822998, + -11.91252613067627, + -12.721294403076172, + -11.174097061157227, + -7.990510940551758, + -13.991429328918457, + -13.859502792358398, + -11.520356178283691, + -13.971871376037598, + -10.134190559387207, + -13.07124137878418, + -13.591009140014648, + -10000.0, + -12.195642471313477, + -11.237064361572266, + -14.172703742980957, + -12.285969734191895, + -12.486908912658691, + -13.344427108764648, + -14.147533416748047, + -8.739598274230957, + -10.91167163848877, + -9.65263843536377, + -12.717270851135254, + -9.05600643157959, + -9.518941879272461, + -13.623188972473145, + -13.740755081176758, + -12.843908309936523, + -12.340595245361328, + -11.486748695373535, + -12.710258483886719, + -8.179301261901855, + -9.062299728393555, + -6.97949743270874, + -11.858112335205078, + -11.286624908447266, + -11.264288902282715, + -11.513409614562988, + -12.92937183380127, + -13.888775825500488, + -11.602699279785156, + -11.45335578918457, + -8.737150192260742, + -13.517394065856934, + -5.306643009185791, + -10.628350257873535, + -11.167513847351074, + -8.796040534973145, + -10000.0, + -7.871254920959473, + -12.365166664123535, + -10.174576759338379, + -11.040783882141113, + -10.433000564575195, + -13.301560401916504, + -10.412057876586914, + -12.430901527404785, + -10.438301086425781, + -12.994340896606445, + -9.674080848693848, + -6.31805419921875, + -10000.0, + -13.583410263061523, + -12.609077453613281, + -12.849180221557617, + -13.826027870178223, + -12.031959533691406, + -11.845420837402344, + -13.824118614196777, + -14.176135063171387, + -11.988388061523438, + -11.812614440917969, + -11.836989402770996, + -11.457304000854492, + -7.770702362060547, + -7.727717399597168, + -11.643401145935059, + -11.470467567443848, + -7.818939208984375, + 
-11.843179702758789, + -13.107500076293945, + -3.9299705028533936, + -8.545232772827148, + -10.604230880737305, + -11.725444793701172, + -14.248205184936523, + -12.327702522277832, + -10.582293510437012, + -13.244439125061035, + -12.059713363647461, + -12.921284675598145, + -12.883295059204102, + -8.039029121398926, + -12.784309387207031, + -9.565606117248535, + -13.330506324768066, + -13.591753005981445, + -11.633308410644531, + -8.91528034210205, + -13.258749961853027, + -14.396084785461426, + -12.985039710998535, + -7.417489528656006, + -13.149593353271484, + -12.515260696411133, + -12.587913513183594, + -8.725703239440918, + -12.955209732055664, + -13.303566932678223, + -12.77481460571289, + -10.94013786315918, + -12.95765495300293, + -12.781828880310059, + -7.8542914390563965, + -10.790460586547852, + -11.324527740478516, + -10000.0, + -14.37807559967041, + -11.92667007446289, + -13.613455772399902, + -8.873832702636719, + -10.859443664550781, + -10000.0, + -13.676170349121094, + -10000.0, + -10.775018692016602, + -12.95517349243164, + -13.232227325439453, + -14.387094497680664, + -12.78573989868164, + -11.10414981842041, + -12.770772933959961, + -8.37712574005127, + -13.284975051879883, + -12.99605655670166, + -12.795123100280762, + -12.59602165222168, + -14.046062469482422, + -12.717855453491211, + -11.992505073547363, + -11.261098861694336, + -11.927038192749023, + -7.993113040924072, + -12.678472518920898, + -8.111339569091797, + -8.321968078613281, + -9.167539596557617, + -6.938859939575195, + -10.086030006408691, + -11.12663459777832, + -10.549439430236816, + -10.726285934448242, + -9.660466194152832, + -8.06203842163086, + -9.214642524719238, + -13.663848876953125, + -11.201567649841309, + -11.692606925964355, + -10000.0, + -11.061088562011719, + -11.801623344421387, + -12.245379447937012, + -12.138701438903809, + -12.882545471191406, + -12.515228271484375, + -13.013315200805664, + -10000.0, + -12.440690994262695, + -11.182541847229004, + 
-12.293285369873047, + -13.289390563964844, + -8.25012493133545, + -9.714176177978516, + -8.54870319366455, + -10.123932838439941, + -10.123574256896973, + -10.787864685058594, + -10.535843849182129, + -12.084434509277344, + -11.367830276489258, + -9.272168159484863, + -12.730428695678711, + -10.77696418762207, + -11.299437522888184, + -9.2794771194458, + -10.121893882751465, + -12.715132713317871, + -9.817319869995117, + -11.207874298095703, + -9.38442325592041, + -12.960518836975098, + -9.32898998260498, + -11.51174259185791, + -10.884286880493164, + -11.72599983215332, + -13.018866539001465, + -13.688644409179688, + -11.104406356811523, + -9.628833770751953, + -10000.0, + -14.26093864440918, + -11.645624160766602, + -13.037396430969238, + -13.70888900756836, + -14.17890739440918, + -12.572925567626953, + -11.417001724243164, + -11.92243480682373, + -10000.0, + -10000.0, + -9.334487915039062, + -11.304604530334473, + -8.963071823120117, + -8.566967010498047, + -8.42164134979248, + -7.929381370544434, + -12.258378028869629, + -8.675025939941406, + -7.946563243865967, + -12.063398361206055, + -11.749732971191406, + -10000.0, + -10000.0, + -11.486349105834961, + -8.294340133666992, + -8.934319496154785, + -9.14141845703125, + -8.774731636047363, + -9.606856346130371, + -7.985258102416992, + -8.77881145477295, + -9.519185066223145, + -11.011818885803223, + -10000.0, + -12.663806915283203, + -9.498204231262207, + -11.668294906616211, + -11.114726066589355, + -10.064451217651367, + -12.4163236618042, + -9.18289566040039, + -14.760283470153809, + -12.412688255310059, + -12.485615730285645, + -13.954992294311523, + -10.590344429016113, + -11.5427827835083, + -10.981088638305664, + -7.230983257293701, + -12.179085731506348, + -11.862399101257324, + -8.748784065246582, + -12.161140441894531, + -6.327768802642822, + -10.773768424987793, + -11.07607364654541, + -12.150201797485352, + -11.026751518249512, + -10.101223945617676, + -12.312088966369629, + -13.726814270019531, + 
-10.998577117919922, + -11.658036231994629, + -12.724591255187988, + -8.90641975402832, + -10.587677001953125, + -13.232854843139648, + -12.673201560974121, + -12.671024322509766, + -9.579854965209961, + -12.255494117736816, + -13.195446968078613, + -13.792229652404785, + -12.136938095092773, + -14.033784866333008, + -11.097009658813477, + -12.227850914001465, + -8.381165504455566, + -11.749275207519531, + -10.788141250610352, + -9.176112174987793, + -8.276718139648438, + -12.19137954711914, + -12.501619338989258, + -10000.0, + -12.824344635009766, + -8.281817436218262, + -11.095662117004395, + -14.165250778198242, + -11.348094940185547, + -12.567456245422363, + -8.52457332611084, + -12.733758926391602, + -14.122416496276855, + -12.041360855102539, + -14.223989486694336, + -10.91444206237793, + -11.301746368408203, + -13.864544868469238, + -9.358236312866211, + -12.629053115844727, + -13.954301834106445, + -12.757906913757324, + -11.3294677734375, + -12.579774856567383, + -9.734641075134277, + -12.667106628417969, + -7.761864185333252, + -13.73751163482666, + -8.641522407531738, + -9.142242431640625, + -11.219501495361328, + -8.902569770812988, + -10.32934284210205, + -9.886985778808594, + -12.262642860412598, + -10.265408515930176, + -10.638322830200195, + -13.340587615966797, + -13.356618881225586, + -11.643696784973145, + -14.654583930969238, + -11.425148963928223, + -9.355377197265625, + -13.895018577575684, + -7.402246475219727, + -14.136250495910645, + -6.3201680183410645, + -13.139766693115234, + -12.429423332214355, + -6.986958980560303, + -12.735187530517578, + -9.379314422607422, + -13.72009563446045, + -14.089472770690918, + -13.748627662658691, + -11.978914260864258, + -13.91929817199707, + -13.818605422973633, + -12.800165176391602, + -12.482606887817383, + -7.0481696128845215, + -13.999007225036621, + -12.334272384643555, + -9.02311897277832, + -12.519035339355469, + -12.279437065124512, + -13.015851020812988, + -11.549497604370117, + 
-10.443224906921387, + -11.093907356262207, + -11.322037696838379, + -12.739632606506348, + -11.954368591308594, + -11.006121635437012, + -6.716159820556641, + -13.072667121887207, + -9.844562530517578, + -7.880402088165283, + -12.887127876281738, + -9.566628456115723, + -12.454107284545898, + -13.231634140014648, + -10.958243370056152, + -7.653661727905273, + -10.862187385559082, + -12.770365715026855, + -13.171697616577148, + -12.640562057495117, + -12.947507858276367, + -12.908676147460938, + -13.518932342529297, + -12.32068157196045, + -13.127158164978027, + -11.459029197692871, + -8.377157211303711, + -10000.0, + -10.775200843811035, + -10.465039253234863, + -12.271913528442383, + -14.012896537780762, + -11.834538459777832, + -11.865020751953125, + -13.912064552307129, + -13.423148155212402, + -12.969127655029297, + -13.150151252746582, + -13.349272727966309, + -13.358369827270508, + -13.805335998535156, + -10000.0, + -9.97810173034668, + -9.83073902130127, + -10.826997756958008, + -10.815749168395996, + -13.47970962524414, + -13.573468208312988, + -7.573562145233154, + -13.315520286560059, + -9.457472801208496, + -10.05649185180664, + -8.165127754211426, + -12.03433609008789, + -11.478641510009766, + -13.337766647338867, + -12.477649688720703, + -12.112797737121582, + -13.071025848388672, + -13.196009635925293, + -13.268590927124023, + -12.347124099731445, + -13.836188316345215, + -12.877021789550781, + -11.046356201171875, + -9.727120399475098, + -11.82292366027832, + -12.20386028289795, + -11.933242797851562, + -13.743108749389648, + -10000.0, + -13.26471996307373, + -7.864959716796875, + -12.188377380371094, + -13.456745147705078, + -13.93160629272461, + -10000.0, + -12.802282333374023, + -9.642523765563965, + -13.602763175964355, + -13.777410507202148, + -14.326563835144043, + -12.257325172424316, + -8.880353927612305, + -11.984357833862305, + -10.867341995239258, + -13.713247299194336, + -10.132527351379395, + -13.895491600036621, + -14.169499397277832, 
+ -11.725127220153809, + -13.49624252319336, + -13.843846321105957, + -13.832304000854492, + -13.618553161621094, + -11.058784484863281, + -10.402983665466309, + -10.856165885925293 + ] +} + diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index 066a975d7de7d..86cefd71b2d5f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -49,7 +49,8 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 6: Added 'host.arch' keyword mapping to profiling-hosts // version 7: Added 'host.type', 'cloud.provider', 'cloud.region' keyword mappings to profiling-hosts // version 8: Changed from disabled _source to synthetic _source for profiling-events-* and profiling-metrics - public static final int INDEX_TEMPLATE_VERSION = 8; + // version 9: Changed sort order for profiling-events-* + public static final int INDEX_TEMPLATE_VERSION = 9; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 3; diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml index c84c66f8aa31d..cd227eec4e227 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: close_to - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' + test_runner_features: "close_to" - do: indices.create: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index c9eaa01616175..a4972d0557dab 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' - do: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml index 1c950be5bfbf9..575723853f0aa 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: 'pagination for rrf was added in 8.15' - do: diff --git 
a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index 0583e6d7ae51a..76cedf44d3dbe 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: close_to - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' + test_runner_features: "close_to" - do: indices.create: diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java index a15dc19bb4abf..c3b568fc32b71 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java @@ -12,6 +12,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Predicates; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -20,6 +22,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; public class RollupUsageTransportAction extends XPackUsageFeatureTransportAction { @@ -48,8 +51,12 @@ protected void masterOperation( 
ClusterState state, ActionListener listener ) { - // TODO expose the currently running rollup tasks on this node? Unclear the best way to do that - RollupFeatureSetUsage usage = new RollupFeatureSetUsage(); + int numberOfRollupJobs = 0; + PersistentTasksCustomMetadata persistentTasks = state.metadata().custom(PersistentTasksCustomMetadata.TYPE); + if (persistentTasks != null) { + numberOfRollupJobs = persistentTasks.findTasks(RollupJob.NAME, Predicates.always()).size(); + } + RollupFeatureSetUsage usage = new RollupFeatureSetUsage(numberOfRollupJobs); listener.onResponse(new XPackUsageFeatureResponse(usage)); } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java index b0881eb350d5a..243b478db6dbf 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java @@ -8,18 +8,19 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; import java.io.IOException; import java.util.concurrent.ExecutionException; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.Is.is; import static org.mockito.Mockito.mock; @@ -42,13 +43,15 @@ public void testUsage() throws ExecutionException, 
InterruptedException, IOExcep TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); var usageAction = new RollupUsageTransportAction(transportService, null, threadPool, mock(ActionFilters.class), null); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, null, future); - XPackFeatureSet.Usage rollupUsage = future.get().getUsage(); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); + RollupFeatureSetUsage rollupUsage = (RollupFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); rollupUsage.writeTo(out); - XPackFeatureSet.Usage serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); + var serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); assertThat(rollupUsage.name(), is(serializedUsage.name())); assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.getNumberOfRollupJobs(), equalTo(serializedUsage.getNumberOfRollupJobs())); } } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java index 2f3ece56b3281..3154a5ac0cd7d 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java @@ -108,6 +108,7 @@ public void testCrossClusterSearchWithApiKey() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "role with 
privileges for remote and local indices", "cluster": ["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index ccf9d66a5bc21..cbf735c66462c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -99,6 +99,7 @@ public void testBwcWithLegacyCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "This description should not be sent to remote clusters.", "cluster": ["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index c6bb6e10f0537..6eb49ec1ab8ae 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -187,6 +187,7 @@ public void testCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Role with privileges for remote and local indices.", "indices": [ { "names": ["local_index"], @@ -293,6 +294,7 @@ public void 
testCrossClusterSearch() throws Exception { final var putLocalSearchRoleRequest = new Request("PUT", "/_security/role/local_search"); putLocalSearchRoleRequest.setJsonEntity(Strings.format(""" { + "description": "Role with privileges for searching local only indices.", "indices": [ { "names": ["local_index"], diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 65651b4a7eb65..2fc894c69aa4c 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -341,6 +341,7 @@ public class Constants { "cluster:monitor/update/health/info", "cluster:monitor/ingest/geoip/stats", "cluster:monitor/main", + "cluster:monitor/nodes/capabilities", "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/hot_threads", "cluster:monitor/nodes/info", diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 3ad250c4e6037..bdbd5c659c479 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -87,8 +87,16 @@ protected void createRole(String name, Collection clusterPrivileges) thr final RoleDescriptor role = new RoleDescriptor( name, clusterPrivileges.toArray(String[]::new), - 
new RoleDescriptor.IndicesPrivileges[0], - new String[0] + null, + null, + null, + null, + null, + null, + null, + null, + null, + null ); getSecurityClient().putRole(role); } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index fc522b0213eeb..1b0d3397daa90 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -97,7 +97,7 @@ public void createUsers() throws IOException { createUser(MANAGE_API_KEY_USER, END_USER_PASSWORD, List.of("manage_api_key_role")); createRole("manage_api_key_role", Set.of("manage_api_key")); createUser(MANAGE_SECURITY_USER, END_USER_PASSWORD, List.of("manage_security_role")); - createRole("manage_security_role", Set.of("manage_security")); + createRoleWithDescription("manage_security_role", Set.of("manage_security"), "Allows all security-related operations!"); } @After @@ -1681,6 +1681,134 @@ public void testCrossClusterApiKeyAccessInResponseCanBeUsedAsInputForUpdate() th assertThat(updateResponse4.evaluate("updated"), is(false)); } + public void testUserRoleDescriptionsGetsRemoved() throws IOException { + // Creating API key whose owner's role (limited-by) has description should succeed, + // and limited-by role descriptor should be filtered to remove description. 
+ { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createRestApiKeyResponse = assertOKAndCreateObjectPath(client().performRequest(createRestApiKeyRequest)); + String apiKeyId = createRestApiKeyResponse.evaluate("id"); + + ObjectPath fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors"), equalTo(Map.of())); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + + // Updating should behave the same as create. No limited-by role description should be persisted. + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "cluster": ["all"] + } + } + } + """); + assertThat(responseAsMap(client().performRequest(updateRequest)).get("updated"), equalTo(true)); + fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + 
"role_descriptors":{ + "my-role":{ + "cluster":["all"] + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD)); + String grantedApiKeyId = assertOKAndCreateObjectPath(adminClient().performRequest(grantApiKeyRequest)).evaluate("id"); + var fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, grantedApiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(grantedApiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.name"), equalTo("my-granted-api-key")); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + } + + public void testCreatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + String apiKeyId = createResponse.evaluate("id"); + + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(updateRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + + public void testUpdatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key", + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(createRestApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. unexpected field [description]")); + } + + public void testGrantApiKeyWithRoleDescriptionFails() throws Exception { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + setUserForRequest(grantApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + "role_descriptors":{ + "my-role":{ + "description": "This role does not grant any permissions!" + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD.toString())); + var e = expectThrows(ResponseException.class, () -> client().performRequest(grantApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + public void testWorkflowsRestrictionSupportForApiKeys() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" @@ -1916,6 +2044,22 @@ private Response fetchApiKey(String apiKeyId) throws IOException { return getApiKeyResponse; } + private Response fetchApiKeyWithUser(String username, String apiKeyId, boolean withLimitedBy) throws IOException { + final Request fetchRequest; + if (randomBoolean()) { + fetchRequest = new Request("GET", "/_security/api_key"); + fetchRequest.addParameter("id", apiKeyId); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + } else { + fetchRequest = new Request("GET", "/_security/_query/api_key"); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + fetchRequest.setJsonEntity(Strings.format(""" + { "query": { "ids": { "values": ["%s"] } } }""", apiKeyId)); + } + setUserForRequest(fetchRequest, username, END_USER_PASSWORD); + return client().performRequest(fetchRequest); + } + private void assertBadCreateCrossClusterApiKeyRequest(String body, String expectedErrorMessage) throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(body); @@ -2178,8 +2322,27 @@ private void createRole(String name, Collection localClusterPrivileges, remoteIndicesClusterAliases ) ), + null, null ); getSecurityClient().putRole(role); } + + protected void createRoleWithDescription(String name, Collection clusterPrivileges, String description) throws IOException { + final RoleDescriptor role = new RoleDescriptor( + name, + clusterPrivileges.toArray(String[]::new), + null, + null, + null, + null, + null, + null, + null, + null, + null, + description + ); + getSecurityClient().putRole(role); + } } diff --git 
a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java index 9402d627063c4..500b796e62660 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java @@ -102,6 +102,7 @@ public void setup() throws IOException { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Grants permission for searching local and remote clusters.", "cluster": ["manage_api_key"], "indices": [ { @@ -204,7 +205,8 @@ public void testCrossClusterAccessHeadersSentSingleRemote() throws Exception { null, null, null, - null + null, + null // description is never sent across clusters ) ) ); @@ -273,6 +275,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -305,6 +308,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -418,6 +422,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -438,6 +443,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ) @@ -466,6 +472,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -489,6 +496,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, 
null ) ) @@ -581,6 +589,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -601,6 +610,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -625,6 +635,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -713,6 +724,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -733,6 +745,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -757,6 +770,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java new file mode 100644 index 0000000000000..95a650737d452 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.role; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.support.Validation; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RoleWithDescriptionRestIT extends SecurityOnTrialLicenseRestTestCase { + + public void testCreateOrUpdateRoleWithDescription() throws Exception { + final String roleName = "role_with_description"; + final String initialRoleDescription = randomAlphaOfLengthBetween(0, 10); + { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/" + roleName); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", initialRoleDescription)); + Response createResponse = adminClient().performRequest(createRoleRequest); + assertOK(createResponse); + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + initialRoleDescription + ) + ); + } + { + final String newRoleDescription = randomValueOtherThan(initialRoleDescription, () -> randomAlphaOfLengthBetween(0, 10)); + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/" + roleName); + 
updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", newRoleDescription)); + Response updateResponse = adminClient().performRequest(updateRoleRequest); + assertOK(updateResponse); + + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("index-*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + newRoleDescription + ) + ); + } + } + + public void testCreateRoleWithInvalidDescriptionFails() { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/role_with_large_description"); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(createRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + public void testUpdateRoleWithInvalidDescriptionFails() throws IOException { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/my_role"); + createRoleRequest.setJsonEntity(""" + { + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }"""); + Response createRoleResponse = adminClient().performRequest(createRoleRequest); + assertOK(createRoleResponse); + + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/my_role"); + updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": 
"%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + private void fetchRoleAndAssertEqualsExpected(final String roleName, final RoleDescriptor expectedRoleDescriptor) throws IOException { + final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); + assertOK(getRoleResponse); + final Map actual = responseAsParser(getRoleResponse).map( + HashMap::new, + p -> RoleDescriptor.parserBuilder().allowDescription(true).build().parse(expectedRoleDescriptor.getName(), p) + ); + assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); + } +} diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java index 28da12b226a66..aa5967ea7277a 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java @@ -89,6 +89,7 @@ public void testRemoteIndexPrivileges() throws IOException { .grantedFields("field") .build() }, null, + null, null ) ); @@ -163,6 +164,7 @@ public void testRemoteIndexPrivileges() throws IOException { 
.grantedFields("field") .build() }, null, + null, null ) ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 286a9cb736b1b..7c753692628cb 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,13 +7,11 @@ package org.elasticsearch.integration; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -27,15 +25,10 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -46,31 +39,25 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; +import java.util.stream.Collectors; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings. + * Tests that file settings service can properly add role mappings and detect REST clashes + * with the reserved role mappings. 
*/ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -148,21 +135,12 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - Settings.Builder builder = Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - // some tests make use of cluster-state based role mappings - .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); - return builder.build(); - } - @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { + private void writeJSONFile(String node, String json) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -173,11 +151,10 @@ public static void writeJSONFile(String node, String json, Logger logger, Atomic Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -261,41 +238,49 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); 
- for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), + allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) + ); - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // Try using the REST API to update the everyone_kibana role mapping + // This should fail, we have reserved certain role mappings in operator mode + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() 
+ ).getMessage() + ); + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() + ).getMessage() + ); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), testJSON); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -307,65 +292,32 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones - { - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); - } - - // and roles are resolved based on the native role mappings - for (UserRoleMapper userRoleMapper 
: internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); - } - - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - } - - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); } - public static Tuple setupClusterStateListenerForError( - ClusterService clusterService, - Consumer errorMetadataConsumer - ) { + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata 
reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { + if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - errorMetadataConsumer.accept(reservedState.errorMetadata()); + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + reservedState.errorMetadata().errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") + ); } } }); @@ -373,13 +325,22 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } + private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // This should succeed, nothing was reserved + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); + } + public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, 
TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -392,94 +353,76 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError( - internalCluster().getCurrentMasterNodeInstance(ClusterService.class), - errorMetadata -> { - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); - assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - errorMetadata.errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") - ); - } - ); + savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); - awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON); + assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); + } - // no roles are resolved because both role mapping stores are empty - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + private Tuple setupClusterStateListenerForSecurityWriteError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); 
+ if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); } - public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + public void testRoleMappingFailsToWriteToStore() throws Exception { ensureGreen(); - // expect the role mappings to apply even if the .security index is closed - var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - try { - var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); - - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - // cluster state settings are also applied - var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) - .get(); - assertThat( - 
clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), - equalTo("50mb") - ); - - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - .reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); - - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - // and roles are resolved based on the cluster-state role mappings - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - } finally { - savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(openIndexResponse.isAcknowledged()); - } + final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + + var response = client().execute(GetRoleMappingsAction.INSTANCE, 
request).get(); + assertFalse(response.hasMappings()); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); + + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": true, - "roles": [ "kibana_user_native" ], + "enabled": false, + "roles": [ "kibana_user" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -490,7 +433,8 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + return PutRoleMappingRequest.fromMapping(mapping); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java new file mode 100644 index 0000000000000..48e97b7afb897 --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java @@ -0,0 +1,148 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xpack.wildcard.Wildcard; + +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +public class FileSettingsRoleMappingsStartupIT extends 
SecurityIntegTestCase { + + private static AtomicLong versionCounter = new AtomicLong(1); + private static String testJSONForFailedCase = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana_2": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something" + } + } + } + } + }"""; + + @Override + protected void doAssertXPackIsInstalled() {} + + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return null; + } + + private void writeJSONFile(String node, String json) throws Exception { + long version = versionCounter.incrementAndGet(); + + FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); + + Files.deleteIfExists(fileSettingsService.watchedFile()); + + Files.createDirectories(fileSettingsService.watchedFileDir()); + Path tempFilePath = createTempFile(); + + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info(Strings.format(json, version)); + Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); + Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + } + + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + if (reservedState != null && reservedState.errorMetadata() != null) { + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, 
reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + } else if (reservedState != null) { + logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); + } else { + logger.debug(() -> "Got cluster state update: " + event.source()); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); + } + + @TestLogging( + value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", + reason = "https://github.com/elastic/elasticsearch/issues/98391" + ) + public void testFailsOnStartMasterNodeWithError() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + + internalCluster().startMasterOnlyNode(); + + logger.info("--> write some role mappings, no other file settings"); + writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); + var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); + + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + } + + public Collection> nodePlugins() { + return Arrays.asList( + UnstableLocalStateSecurity.class, + Netty4Plugin.class, + ReindexPlugin.class, + CommonAnalysisPlugin.class, + InternalSettingsPlugin.class, + MapperExtrasPlugin.class, + Wildcard.class + ); + } + +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 351cf05b2096d..58d6657b99e32 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -85,7 +85,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; @@ -2551,11 +2551,11 @@ public void testUpdateApiKeysNoopScenarios() throws Exception { final List newRoleDescriptors = List.of( randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ) ); response = updateSingleApiKeyMaybeUsingBulkAction( @@ -2769,7 +2769,7 @@ private List randomRoleDescriptors() { new RoleDescriptor(randomAlphaOfLength(10), new String[] { "all" }, null, null), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null, - () -> RoleDescriptorTests.randomRoleDescriptor(false, true, false, true) + () -> RoleDescriptorTestHelper.builder().allowRemoteIndices(true).allowRemoteClusters(true).build() ) ); case 2 -> null; @@ -2887,6 +2887,7 @@ private void 
expectRoleDescriptorsForApiKey( final var descriptor = (Map) rawRoleDescriptor.get(expectedRoleDescriptor.getName()); final var roleDescriptor = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse( expectedRoleDescriptor.getName(), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 9d56528a060c3..ce4c8719f0642 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -223,6 +223,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -245,6 +246,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); preparePutRole("test_role3").cluster("all", "none") .runAs("root", "nobody") @@ -256,6 +258,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); logger.info("--> retrieving all roles"); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 0ff4f1160af56..ef08f855a46cc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,7 +1103,8 @@ Collection createComponents( new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); + systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 73d1a1abcdb50..852887767578f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,18 +7,24 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -32,59 +38,123 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. */ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { public static final String NAME = "role_mappings"; + private final NativeRoleMappingStore roleMappingStore; + private final ListenableFuture securityIndexRecoveryListener = new ListenableFuture<>(); + + /** + * Creates a ReservedRoleMappingAction + * + * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings + */ + public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { + this.roleMappingStore = roleMappingStore; + } + @Override public String name() { return NAME; } + private static Collection prepare(List roleMappings) { + List requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); + + var exceptions = new ArrayList(); + for (var request : requests) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = request.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping 
requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + + return requests; + } + @Override public TransformState transform(Object source, TransformState prevState) throws Exception { + // We execute the prepare() call to catch any errors in the transform phase. + // Since we store the role mappings outside the cluster state, we do the actual save with a + // non cluster state transform call. @SuppressWarnings("unchecked") - Set roleMappings = validate((List) source); - RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); - if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { - return prevState; - } else { - ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); - Set entities = newRoleMappingMetadata.getRoleMappings() - .stream() - .map(ExpressionRoleMapping::getName) - .collect(Collectors.toSet()); - return new TransformState(newState, entities); + var requests = prepare((List) source); + return new TransformState( + prevState.state(), + prevState.keys(), + l -> securityIndexRecoveryListener.addListener( + ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) + ) + ); + } + + // Exposed for testing purposes + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + Set entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); + Set toDelete = new HashSet<>(prevState.keys()); + toDelete.removeAll(entities); + + final int tasksCount = requests.size() + toDelete.size(); + + // Nothing to do, don't start a group listener with 0 actions + if (tasksCount == 0) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); + return; + } + + GroupedActionListener taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { + @Override + public 
void onResponse(Collection booleans) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + + for (var request : requests) { + roleMappingStore.putRoleMapping(request, taskListener); + } + + for (var mappingToDelete : toDelete) { + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName(mappingToDelete); + roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); } } @Override - public List fromXContent(XContentParser parser) throws IOException { - List result = new ArrayList<>(); + public List fromXContent(XContentParser parser) throws IOException { + List result = new ArrayList<>(); + Map source = parser.map(); + for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map content = (Map) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); + result.add(mapping); } } + return result; } - private Set validate(List roleMappings) { - var exceptions = new ArrayList(); - for (var roleMapping : roleMappings) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = roleMapping.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); + public void securityIndexRecovered() { + 
securityIndexRecoveryListener.onResponse(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index b4e8d5d6db83f..811d357b89f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,7 +18,12 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportDeleteRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< + DeleteRoleMappingRequest, + DeleteRoleMappingResponse> { private final NativeRoleMappingStore roleMappingStore; @@ -26,20 +31,25 @@ public class TransportDeleteRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { 
roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } + + @Override + public Optional reservedStateHandlerName() { + return Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(DeleteRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 44c72bc13a54b..5e32e4f903f81 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,7 +18,10 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportPutRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -26,17 +29,32 
@@ public class TransportPutRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected( + Task task, + final PutRoleMappingRequest request, + final ActionListener listener + ) { roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } + + @Override + public Optional reservedStateHandlerName() { + return Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(PutRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 6d76fac71e900..55a89e184f84f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -360,8 +360,9 @@ && hasRemoteIndices(request.getRoleDescriptors())) { return; } + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - userRoleDescriptors, + userRolesWithoutDescription, transportVersion, request.getId() ); @@ -370,6 +371,28 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } + private Set removeUserRoleDescriptorDescriptions(Set userRoleDescriptors) { + return userRoleDescriptors.stream().map(roleDescriptor -> { + if (roleDescriptor.hasDescription()) { + return new RoleDescriptor( + roleDescriptor.getName(), + roleDescriptor.getClusterPrivileges(), + roleDescriptor.getIndicesPrivileges(), + roleDescriptor.getApplicationPrivileges(), + roleDescriptor.getConditionalClusterPrivileges(), + roleDescriptor.getRunAs(), + roleDescriptor.getMetadata(), + roleDescriptor.getTransientMetadata(), + 
roleDescriptor.getRemoteIndicesPrivileges(), + roleDescriptor.getRemoteClusterPermissions(), + roleDescriptor.getRestriction(), + null + ); + } + return roleDescriptor; + }).collect(Collectors.toSet()); + } + private TransportVersion getMinTransportVersion() { return clusterService.state().getMinTransportVersion(); } @@ -534,8 +557,9 @@ public void updateApiKeys( } final String[] apiKeyIds = request.getIds().toArray(String[]::new); + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - userRoleDescriptors, + userRolesWithoutDescription, transportVersion, apiKeyIds ); @@ -673,7 +697,8 @@ static Set maybeRemoveRemotePrivileges( roleDescriptor.hasRemoteClusterPermissions() && transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS) ? null : roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 71a78c1627946..7618135c8662f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -67,7 +67,10 @@ public class FileRolesStore implements BiConsumer, ActionListener, ActionListener< private static final Logger logger = LogManager.getLogger(NativeRolesStore.class); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allow2xFormat(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allow2xFormat(true) + .allowDescription(true) + .build(); private 
final Settings settings; private final Client client; @@ -272,9 +276,18 @@ public void putRole(final PutRoleRequest request, final RoleDescriptor role, fin "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" ) ); - } else { - innerPutRole(request, role, listener); - } + } else if (role.hasDescription() + && clusterService.state().getMinTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + listener.onFailure( + new IllegalStateException( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } else { + innerPutRole(request, role, listener); + } } // pkg-private for testing @@ -535,7 +548,8 @@ static RoleDescriptor transformRole(String id, BytesReference sourceBytes, Logge transientMap, roleDescriptor.getRemoteIndicesPrivileges(), roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } else { return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index 55562c8ee0138..e7e24037543fa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,8 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -19,7 +17,6 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -60,18 +57,12 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - String name = request.param("name"); - String refresh = request.param("refresh"); - PutRoleMappingRequestBuilder requestBuilder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - request.requiredContent(), - request.getXContentType() - ) - ) { - requestBuilder = new PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); - } + final String name = request.param("name"); + PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( + name, + request.requiredContent(), + request.getXContentType() + ).setRefreshPolicy(request.param("refresh")); return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index ed198834d24f1..9e20cb05a3cdc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; @@ -23,9 +24,12 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; @@ -38,7 +42,6 @@ public class SecuritySystemIndices { public static final int INTERNAL_MAIN_INDEX_FORMAT = 6; - public static final int INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_TOKENS_INDEX_FORMAT = 7; private static final int INTERNAL_TOKENS_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_PROFILE_INDEX_FORMAT = 8; @@ -119,18 +122,22 @@ private void checkInitialized() { } private SystemIndexDescriptor getSecurityMainIndexDescriptor() { - return SystemIndexDescriptor.builder() - // This can't just be `.security-*` because that would overlap with the tokens index pattern - .setIndexPattern(".security-[0-9]+*") - .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) - .setDescription("Contains Security configuration") - .setMappings(getMainIndexMappings()) - .setSettings(getMainIndexSettings()) - .setAliasName(SECURITY_MAIN_ALIAS) - .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) - .setVersionMetaKey(SECURITY_VERSION_STRING) - .setOrigin(SECURITY_ORIGIN) - .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) + final Function 
securityIndexDescriptorBuilder = + mappingVersion -> SystemIndexDescriptor.builder() + // This can't just be `.security-*` because that would overlap with the tokens index pattern + .setIndexPattern(".security-[0-9]+*") + .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) + .setDescription("Contains Security configuration") + .setMappings(getMainIndexMappings(mappingVersion)) + .setSettings(getMainIndexSettings()) + .setAliasName(SECURITY_MAIN_ALIAS) + .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) + .setVersionMetaKey(SECURITY_VERSION_STRING) + .setOrigin(SECURITY_ORIGIN) + .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS); + + return securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.latest()) + .setPriorSystemIndexDescriptors(List.of(securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.INITIAL).build())) .build(); } @@ -149,14 +156,14 @@ private static Settings getMainIndexSettings() { .build(); } - private XContentBuilder getMainIndexMappings() { + private XContentBuilder getMainIndexMappings(SecurityMainIndexMappingVersion mappingVersion) { try { final XContentBuilder builder = jsonBuilder(); builder.startObject(); { builder.startObject("_meta"); builder.field(SECURITY_VERSION_STRING, BWC_MAPPINGS_VERSION); // Only needed for BWC with pre-8.15.0 nodes - builder.field(SystemIndexDescriptor.VERSION_META_KEY, INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + builder.field(SystemIndexDescriptor.VERSION_META_KEY, mappingVersion.id); builder.endObject(); builder.field("dynamic", "strict"); @@ -304,22 +311,24 @@ private XContentBuilder getMainIndexMappings() { } builder.endObject(); - builder.startObject("remote_cluster"); - { - builder.field("type", "object"); - builder.startObject("properties"); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("remote_cluster"); { - builder.startObject("clusters"); - builder.field("type", "keyword"); - builder.endObject(); 
+ builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("clusters"); + builder.field("type", "keyword"); + builder.endObject(); - builder.startObject("privileges"); - builder.field("type", "keyword"); + builder.startObject("privileges"); + builder.field("type", "keyword"); + builder.endObject(); + } builder.endObject(); } builder.endObject(); } - builder.endObject(); builder.startObject("applications"); { @@ -402,6 +411,12 @@ private XContentBuilder getMainIndexMappings() { builder.field("type", "keyword"); builder.endObject(); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("description"); + builder.field("type", "text"); + builder.endObject(); + } + builder.startObject("run_as"); builder.field("type", "keyword"); builder.endObject(); @@ -1010,4 +1025,46 @@ private static void defineRealmDomain(XContentBuilder builder, String fieldName) builder.endObject(); } + /** + * Every change to the mapping of .security index must be versioned. When adding a new mapping version: + *
    + *
  • pick the next largest version ID - this will automatically become the new {@link #latest()} version
  • + *
  • add your mapping change in {@link #getMainIndexMappings(SecurityMainIndexMappingVersion)} conditionally to a new version
  • + *
  • make sure to set old latest version to "prior system index descriptors" in {@link #getSecurityMainIndexDescriptor()}
  • + *
+ */ + public enum SecurityMainIndexMappingVersion implements VersionId { + + /** + * Initial .security index mapping version. + */ + INITIAL(1), + + /** + * The mapping was changed to add new text description and remote_cluster fields. + */ + ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS(2), + + ; + + private static final SecurityMainIndexMappingVersion LATEST = Arrays.stream(values()) + .max(Comparator.comparingInt(v -> v.id)) + .orElseThrow(); + + private final int id; + + SecurityMainIndexMappingVersion(int id) { + assert id > 0; + this.id = id; + } + + @Override + public int id() { + return id; + } + + public static SecurityMainIndexMappingVersion latest() { + return LATEST; + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java index e8eb50e3a6529..a7014ece93ae5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -212,7 +212,7 @@ private Map getRoleDescriptors(String roleParameter) thr XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); final String roleName = parser.currentName(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - final RoleDescriptor role = RoleDescriptor.parserBuilder().build().parse(roleName, parser); + final RoleDescriptor role = RoleDescriptor.parserBuilder().allowDescription(true).build().parse(roleName, parser); roles.put(roleName, role); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java new file mode 100644 index 0000000000000..b4a07093e49c3 --- 
/dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; + +/** + * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. This is used + * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. + *

+ * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the + * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor + * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} + */ +public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { + public LocalReservedUnstableSecurityStateHandlerProvider() { + throw new IllegalStateException("Provider must be constructed using PluginsService"); + } + + public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { + super(plugin); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java new file mode 100644 index 0000000000000..5621bdced15b3 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.reservedstate.ReservedClusterStateHandler; +import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Optional; + +/** + * A test class that allows us to Inject new type of Reserved Handler that can + * simulate errors in saving role mappings. + *

+ * We can't use our regular path to simply make an extension of LocalStateSecurity + * in an integration test class, because the reserved handlers are injected through + * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) + */ +public final class UnstableLocalStateSecurity extends LocalStateSecurity { + + public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { + super(settings, configPath); + // We reuse most of the initialization of LocalStateSecurity, we then just overwrite + // the security plugin with an extra method to give us a fake RoleMappingAction. + Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); + if (security.isPresent()) { + plugins.remove(security.get()); + } + + UnstableLocalStateSecurity thisVar = this; + var action = new ReservedUnstableRoleMappingAction(); + + plugins.add(new Security(settings, super.securityExtensions()) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + List> reservedClusterStateHandlers() { + // pretend the security index is initialized after 2 seconds + var timer = new java.util.Timer(); + timer.schedule(new java.util.TimerTask() { + @Override + public void run() { + action.securityIndexRecovered(); + timer.cancel(); + } + }, 2_000); + return List.of(action); + } + }); + } + + public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { + /** + * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store + */ + public ReservedUnstableRoleMappingAction() { + // we don't actually need a NativeRoleMappingStore + super(null); + } + + /** + * The nonStateTransform method is the only one that uses the native store, we simply pretend + * something has called the onFailure method of the listener. 
+ */ + @Override + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + listener.onFailure(new IllegalStateException("Fake exception")); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index cac7c91f73ed1..6cdca0cb3b24d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,40 +7,77 @@ package org.elasticsearch.xpack.security.action.reservedstate; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import 
static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { - private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - assertThat(state.nonStateTransform(), nullValue()); - return state; + + CountDownLatch latch = new CountDownLatch(1); + AtomicReference> updatedKeys = new AtomicReference<>(); + AtomicReference error = new AtomicReference<>(); + state.nonStateTransform().accept(new ActionListener<>() { + @Override + public void onResponse(NonStateTransformResult nonStateTransformResult) { + updatedKeys.set(nonStateTransformResult.updatedKeys()); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + error.set(e); + latch.countDown(); + } + }); + + latch.await(); + if (error.get() != null) { + throw error.get(); + } + return new TransformState(state.state(), updatedKeys.get()); } } public void testValidation() { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String badPolicyJSON = """ { "everyone_kibana": { @@ -60,6 +97,7 @@ public void 
testValidation() { } } }"""; + assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -67,9 +105,13 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -105,4 +147,102 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } + + @SuppressWarnings("unchecked") + public void testNonStateTransformWaitsOnAsyncActions() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate put role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate delete role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + 
}).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); + TransformState updatedState = new TransformState(state, Collections.emptySet()); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + } + }"""; + + assertEquals( + "err_done", + expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) + .getMessage() + ); + + // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + updatedState = processJSON(action, updatedState, json); + assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); + + assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); + } + + @SuppressWarnings("unchecked") + private NativeRoleMappingStore mockNativeRoleMappingStore() { + final NativeRoleMappingStore nativeRoleMappingStore = spy( + new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) + ); + + 
doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + return nativeRoleMappingStore; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java new file mode 100644 index 0000000000000..038e673e07862 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.rolemapping; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; + +import java.util.Collections; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.Mockito.mock; + +public class TransportDeleteRoleMappingActionTests extends ESTestCase { + public void testReservedStateHandler() { + var store = mock(NativeRoleMappingStore.class); + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + mock(ThreadPool.class), + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); + + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName("kibana_all"); + assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..58a8e8e3d4751 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,12 +9,16 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -29,6 +33,7 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -55,7 +60,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); requestRef = new AtomicReference<>(null); @@ -94,7 +99,39 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); 
+ action.doExecuteProtected(mock(Task.class), request, future); return future.get(); } + + public void testReservedStateHandler() throws Exception { + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" + } + } + }"""; + + try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { + ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); + var parsedResult = roleMappingAction.fromXContent(parser); + + for (var mapping : parsedResult) { + assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); + } + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 107f7c0632ea7..7752b85c6345c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -103,7 +103,7 @@ import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import 
org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -1857,6 +1857,7 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru ApiKeyCredentials apiKeyCredentials3 = getApiKeyCredentials(docId3, apiKey3, type); final List keyRoles = List.of( RoleDescriptor.parserBuilder() + .allowRestriction(true) .allow2xFormat(true) .build() .parse("key-role", new BytesArray("{\"cluster\":[\"monitor\"]}"), XContentType.JSON) @@ -2348,12 +2349,12 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final ApiKey.Type type = randomFrom(ApiKey.Type.values()); final Set oldUserRoles = type == ApiKey.Type.CROSS_CLUSTER ? Set.of() - : randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor); + : randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); final List oldKeyRoles; if (type == ApiKey.Type.CROSS_CLUSTER) { oldKeyRoles = List.of(CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()).build()); } else { - oldKeyRoles = randomList(3, RoleDescriptorTests::randomRoleDescriptor); + oldKeyRoles = randomList(3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); } final long now = randomMillisUpToYear9999(); when(clock.instant()).thenReturn(Instant.ofEpochMilli(now)); @@ -2388,7 +2389,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final boolean changeExpiration = randomBoolean(); final Set newUserRoles = changeUserRoles - ? randomValueOtherThan(oldUserRoles, () -> randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor)) + ? 
randomValueOtherThan( + oldUserRoles, + () -> randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ) : oldUserRoles; final List newKeyRoles; if (changeKeyRoles) { @@ -2401,7 +2405,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { } }); } else { - newKeyRoles = randomValueOtherThan(oldKeyRoles, () -> randomList(0, 3, RoleDescriptorTests::randomRoleDescriptor)); + newKeyRoles = randomValueOtherThan( + oldKeyRoles, + () -> randomList(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ); } } else { newKeyRoles = randomBoolean() ? oldKeyRoles : null; @@ -2582,7 +2589,16 @@ public void testGetApiKeyMetadata() throws IOException { public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList(2, 5, () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), false)) + randomList( + 2, + 5, + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() + ) ); // Selecting random unsupported version. @@ -2615,11 +2631,7 @@ public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList( - 2, - 5, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) - ) + randomList(2, 5, () -> RoleDescriptorTestHelper.builder().allowRemoteClusters(true).build()) ); // Selecting random unsupported version. 
@@ -2931,7 +2943,12 @@ public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() { final List requestRoleDescriptors = randomList( 0, 1, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, randomBoolean(), false) + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() ); final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class); @@ -2959,34 +2976,23 @@ private static RoleDescriptor randomRoleDescriptorWithRemotePrivileges() { return new RoleDescriptor( randomAlphaOfLengthBetween(3, 90), randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + RoleDescriptorTestHelper.randomIndicesPrivileges(0, 3), + RoleDescriptorTestHelper.randomApplicationPrivileges(), + RoleDescriptorTestHelper.randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), + RoleDescriptorTestHelper.randomRoleDescriptorMetadata(randomBoolean()), Map.of(), - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3), + RoleDescriptorTestHelper.randomRemoteIndicesPrivileges(1, 3), new RemoteClusterPermissions().addGroup( new RemoteClusterPermissionGroup(new String[] { "monitor_enrich" }, new String[] { "*" }) ), - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) + RoleRestrictionTests.randomWorkflowsRestriction(1, 3), + randomAlphaOfLengthBetween(0, 10) ); } private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), 
- RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), - Map.of(), - null, - null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) - ); + return RoleDescriptorTestHelper.builder().allowReservedMetadata(true).allowRestriction(true).allowRemoteIndices(false).build(); } public static String randomCrossClusterApiKeyAccessField() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java index 20555ced32bd7..7219561dcf9df 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java @@ -164,7 +164,7 @@ public void testExceptionProcessingRequestOnInvalidCrossClusterAccessSubjectInfo // Invalid internal user AuthenticationTestHelper.builder().internal(InternalUsers.XPACK_USER).build(), new RoleDescriptorsIntersection( - new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null) + new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null, null) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java index 664eec036832a..f567057d5b410 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java @@ -19,7 +19,7 @@ import java.util.Base64; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java index 08628c1a5f5af..501c0bee36264 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; @@ -74,7 +74,8 @@ public void testGetRoleDescriptorsIntersectionForRemoteCluster() throws IOExcept .build(), randomNonEmptySubsetOf(List.of(concreteClusterAlias, "*")).toArray(new String[0]) ) }, - null, // TODO: add tests here + null, + null, null ) ); @@ 
-133,7 +134,13 @@ public void testCrossClusterAccessWithInvalidRoleDescriptors() { new RoleDescriptorsIntersection( randomValueOtherThanMany( rd -> false == rd.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), - () -> RoleDescriptorTests.randomRoleDescriptor() + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java index e06f6f212c687..8295f028588cc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -195,7 +194,6 @@ private void testLogging( ); final MockLogAppender mockAppender = new MockLogAppender(); try (var ignored = mockAppender.capturing(timerLogger.getName())) { - Loggers.addAppender(timerLogger, mockAppender); mockAppender.addExpectation(expectation); checker.accept(List.of()); mockAppender.assertAllExpectationsMatched(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 1923d4d86dc71..d71c2b0d19074 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -74,7 +74,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; @@ -199,7 +199,13 @@ public void testResolveAuthorizationInfoForEmptyRestrictedRolesWithAuthenticatio @SuppressWarnings("unchecked") final var listener = (ActionListener>) invocation.getArgument(1); final Supplier randomRoleSupplier = () -> Role.buildFromRoleDescriptor( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, randomBoolean(), false), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(false) + .build(), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index e039f0c66eaeb..fd32bde0f3c53 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -64,7 +64,7 @@ import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; @@ -959,7 +959,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*", "remote").indices("abc-*", "xyz-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-1-*").privileges("read").build(), }, getValidRemoteClusterPermissions(new String[] { "remote-*" }), - null + null, + randomAlphaOfLengthBetween(0, 20) ); ConfigurableClusterPrivilege ccp2 = new MockConfigurableClusterPrivilege() { @@ -988,7 +989,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("*").indices("remote-idx-2-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-3-*").privileges("read").build() }, null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); @@ -1100,7 +1102,15 @@ public void testBuildRoleWithSingleRemoteClusterDefinition() { } public void 
testBuildRoleFromDescriptorsWithSingleRestriction() { - Role role = buildRole(RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean())); + Role role = buildRole( + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() + ); assertThat(role.hasWorkflowsRestriction(), equalTo(true)); } @@ -1108,8 +1118,20 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { var e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()) + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("more than one role descriptor with restriction is not allowed")); @@ -1117,9 +1139,27 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()) + RoleDescriptorTestHelper.builder() + 
.allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("combining role descriptors with and without restriction is not allowed")); @@ -2145,6 +2185,7 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { null, null, null, + null, null ) ) @@ -3089,11 +3130,11 @@ private RoleDescriptor roleDescriptorWithIndicesPrivileges( final RoleDescriptor.RemoteIndicesPrivileges[] rips, final IndicesPrivileges[] ips ) { - return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null); + return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null, null); } private RoleDescriptor roleDescriptorWithRemoteClusterPrivileges(final String name, RemoteClusterPermissions remoteClusterPermissions) { - return new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null); + return new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null, null); } private RemoteClusterPermissions getValidRemoteClusterPermissions(String[] aliases) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 
3d30a3534d422..0a2c40d2a257a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -110,7 +110,7 @@ public void testParseFile() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); RoleDescriptor descriptor = roles.get("role1"); assertNotNull(descriptor); @@ -286,6 +286,18 @@ public void testParseFile() throws Exception { assertThat(group.getQuery(), notNullValue()); assertThat(roles.get("role_query_invalid"), nullValue()); + + descriptor = roles.get("role_with_description"); + assertNotNull(descriptor); + assertThat(descriptor.getDescription(), is(equalTo("Allows all security-related operations!"))); + role = Role.buildFromRoleDescriptor(descriptor, new FieldPermissionsCache(Settings.EMPTY), restrictedIndices); + assertThat(role, notNullValue()); + assertThat(role.names(), equalTo(new String[] { "role_with_description" })); + assertThat(role.cluster(), notNullValue()); + assertThat(role.cluster().privileges(), equalTo(Set.of(ClusterPrivilegeResolver.MANAGE_SECURITY))); + assertThat(role.indices(), is(IndicesPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); + } public void testParseFileWithRemoteIndicesAndCluster() throws IllegalAccessException, IOException { @@ -395,7 +407,7 @@ public void testParseFileWithFLSAndDLSDisabled() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(7)); + assertThat(roles.size(), is(8)); assertThat(roles.get("role_fields"), nullValue()); assertThat(roles.get("role_query"), nullValue()); assertThat(roles.get("role_query_fields"), nullValue()); @@ -452,7 +464,7 @@ public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { new FileRoleValidator.Default() 
); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); assertNotNull(roles.get("role_fields")); assertNotNull(roles.get("role_query")); assertNotNull(roles.get("role_query_fields")); @@ -664,7 +676,7 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "valid_role" })); - assertThat(entries, hasSize(7)); + assertThat(entries, hasSize(8)); assertThat( entries.get(0), startsWith("invalid role definition [fóóbár] in roles file [" + path.toAbsolutePath() + "]. invalid role name") @@ -675,6 +687,10 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(entries.get(4), startsWith("failed to parse role [role4]")); assertThat(entries.get(5), startsWith("failed to parse indices privileges for role [role5]")); assertThat(entries.get(6), startsWith("failed to parse role [role6]. unexpected field [restriction]")); + assertThat( + entries.get(7), + startsWith("invalid role definition [role7] in roles file [" + path.toAbsolutePath() + "]. invalid description") + ); } public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { @@ -683,8 +699,8 @@ public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { List events = CapturingLogger.output(logger.getName(), Level.ERROR); events.clear(); Set roleNames = FileRolesStore.parseFileForRoleNames(path, logger); - assertThat(roleNames.size(), is(7)); - assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6")); + assertThat(roleNames.size(), is(8)); + assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6", "role7")); assertThat(events, hasSize(1)); assertThat( @@ -746,7 +762,7 @@ public void testUsageStats() throws Exception { Map usageStats = store.usageStats(); - assertThat(usageStats.get("size"), is(flsDlsEnabled ? 
10 : 7)); + assertThat(usageStats.get("size"), is(flsDlsEnabled ? 11 : 8)); assertThat(usageStats.get("remote_indices"), is(1L)); assertThat(usageStats.get("remote_cluster"), is(1L)); assertThat(usageStats.get("fls"), is(flsDlsEnabled)); @@ -781,7 +797,7 @@ public void testExists() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); for (var role : roles.keySet()) { assertThat(store.exists(role), is(true)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 35591f99727f2..9d83d5f5c60ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -76,6 +75,10 @@ import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static 
org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -130,14 +133,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -147,14 +151,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").query(matchAllBytes).build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - 
RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(dlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -169,14 +174,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { .deniedFields("foo") .query(matchAllBytes) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -184,14 +190,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "no_fls_dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); 
assertFalse(noFlsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -281,14 +288,15 @@ public void testTransformingRoleWithRestrictionFails() throws IOException { : "{ \"match_all\": {} }" ) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); XContentBuilder builder = roleWithRestriction.toXContent( @@ -463,6 +471,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null, remoteIndicesPrivileges, remoteClusterPermissions, + null, null ); PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index ca974e4e1e723..f076dc24e5d5b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -1483,6 +1483,7 @@ private static ApiKey createApiKeyForOwner(String apiKeyId, String username, Str null, null, null, + null, null ) ), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index 810ef4056fd99..577a8eb9f698e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -42,8 +42,8 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java index 8849edca70d68..6b60336276c35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry.CacheInvalidator; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.junit.Before; import java.time.Instant; import java.util.List; 
import java.util.Set; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -61,7 +61,7 @@ public void testSecurityIndexStateChangeWillInvalidateAllRegisteredInvalidators( true, true, true, - new SystemIndexDescriptor.MappingsVersion(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT, 0), + new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0), ".security", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 2abeeb3fa040b..a7c5c616cf5bf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.hamcrest.Matchers; import org.junit.Before; @@ -63,7 +64,6 @@ import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -389,7 +389,10 @@ public void testCanUpdateIndexMappings() { // 
Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. - int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); // State recovered with index, with mappings with a prior version ClusterState.Builder clusterStateBuilder = createClusterState( @@ -419,11 +422,15 @@ public void testCannotUpdateIndexMappingsWhenMinMappingVersionTooLow() { // Hard-code a failure here. doReturn("Nope").when(descriptorSpy).getMinimumMappingsVersionMessage(anyString()); - doReturn(null).when(descriptorSpy).getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(1, 0))); + doReturn(null).when(descriptorSpy) + .getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0))); // Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. 
- int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); ClusterState.Builder clusterStateBuilder = createClusterState( TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, @@ -457,7 +464,7 @@ public void testNoUpdateWhenIndexMappingsVersionNotBumped() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT) + getMappings(SecurityMainIndexMappingVersion.latest().id()) ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); manager.prepareIndexIfNeededThenExecute(prepareException::set, () -> prepareRunnableCalled.set(true)); @@ -480,7 +487,7 @@ public void testNoUpdateWhenNoIndexMappingsVersionInClusterState() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT), + getMappings(SecurityMainIndexMappingVersion.latest().id()), Map.of() ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); @@ -628,7 +635,7 @@ private static ClusterState.Builder createClusterState( format, state, mappings, - Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(1, 0)) + Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0)) ); } @@ -689,7 +696,7 @@ private static IndexMetadata.Builder getIndexMetadata( } private static String getMappings() { - return getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + return getMappings(SecurityMainIndexMappingVersion.latest().id()); } private static String getMappings(Integer version) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java new file mode 100644 index 0000000000000..7550b96fdf4f9 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; + +import java.util.HashMap; +import java.util.Map; + +public class SecurityMainIndexMappingVersionTests extends ESTestCase { + + public void testVersionIdUniqueness() { + Map ids = new HashMap<>(); + for (var version : SecurityMainIndexMappingVersion.values()) { + var existing = ids.put(version.id(), version); + if (existing != null) { + fail( + "duplicate ID [" + + version.id() + + "] definition found in SecurityMainIndexMappingVersion for [" + + version + + "] and [" + + existing + + "]" + ); + } + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 473cf5ee387b8..00f170a4cf8d8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -88,7 +88,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 3d17572429bac..77c38d302d9c9 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,3 +6,4 @@ # org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider +org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml index 21e9d87189cf0..fa0addce53035 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml @@ -58,3 +58,6 @@ role6: workflows: - workflow1 - workflow2 +role7: + description: + 
"tJywjBJUSwXDiRtpoJxEotFupzVVUIfwnoFMFiTwRoFiURksYxmQOaoykJIYwFvNpiGnfFePFUrCPTEbDXPkXQudrpBikHSQmdqvNjxXvktEghvvIQuzZitqwKjmnQvqlDfqYXSccRiqEslDdkjdcXPmSSggJMqrXmkdNtwBItbjLpHdNPuSgVYLwcBCblGHysaXJFcZHLFbqhirxNGTkENBMpzTXjsMXwSEnqKUZtDSckxGUyFfKXCvumgJkjLrrBvSxjnanuHpmXzUlFGEHqqxJjAstxSGKnPPzzsuZAlsrLTAzAdpBOnLDMdOBDyAweiCLzIvyfwuTWcOMGRWItPUdEdqcLjlYRhOgpTuWsDQcrCYnlIuiEpBodlGwaCDYnppZWmBDMyQCSPSTCwjilXtqmTuwuxwfyCNLbqNWjzKOPhEPsKjuvNpexRhleNgMqrDpmhWOZzRZMDnLYIjNJZKdsgErOoVuyUlJAKnJlpevIZUjXDIyybxXaaFGztppkpMAOVLFHjbiJuGVDdpyBHwxlyvPJOgVeViYZNiKEOWmaIypbuWenBnYRvSdYiHHaSLwuNILDIrAqoNBiFBdMhuLvTKOkepMYFcbXpYqLWYmtPYIVXGfHPUgmYhhsfIatqwhhnefxfTeqqUlVLmLcNAjiBFiiCRfiQvtvWOWJyfATrUeCVNfquIXHzHQWPWtbpeTiYTUvEPQWeeTjKpHrycLmKpsWjCLteqlutXgaeLSAvDvbvrlJZyAWflVnuzdcNxtzfcEocKsoJGOfjKXyQlxapPvOyDZYbvHYoYljYHTrEVPbMOQuwMxKPYkbyEDJuMqOtfgqVHZpsaimFmQjTlAdNOwtDTJdJhZVzgpVTWZCJRBopvQZgbIzPEJOoCVlYRhLDRARxmlrxrAMApKaZxfiMDyhMVZKXCankStqBfYSYOmtYMvkARtngxNINwAehRhDNMZoZuGTylxteKhLqFVKudMuSCpRfCxjNsanWHVvghUJYpcxildbvAhgpU" diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml index cb956ff970800..ec0d325566127 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml @@ -92,3 +92,9 @@ role_remote: - 'remote-*' privileges: - "monitor_enrich" + +role_with_description: + description: + "Allows all security-related operations!" 
+ cluster: + - manage_security diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java index 5638450bfec75..31c9639facd93 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java @@ -78,7 +78,6 @@ protected Settings repositorySettings() { return Settings.builder().put("client", "repository_test_kit").put("container", container).put("base_path", basePath).build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107502") @Override public void testRepositoryAnalysis() throws Exception { super.testRepositoryAnalysis(); diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml index 648eb3766fffb..e5babad76eb05 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml @@ -29,8 +29,8 @@ setup: --- "Analysis fails on readonly repositories": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -45,8 +45,8 @@ setup: --- "Analysis without details": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and 
mixed-cluster support not required" - do: @@ -100,8 +100,8 @@ setup: --- "Analysis with details": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -131,8 +131,8 @@ setup: --- "Analysis with ?human=false": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -157,8 +157,8 @@ setup: --- "Timeout with large blobs": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec index 1615ee3a64256..f6a6cec5dc65b 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec @@ -130,8 +130,7 @@ SELECT COUNT(*), TRUNCATE(emp_no, -2) t FROM test_emp WHERE 'aaabbb' RLIKE 'a{2, 1 |10100 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/96805 -inWithCompatibleDateTypes-Ignore +inWithCompatibleDateTypes SELECT birth_date FROM test_emp WHERE birth_date IN ({d '1959-07-23'}, CAST('1959-12-25T00:00:00' AS TIMESTAMP), '1964-06-02T00:00:00.000Z') OR birth_date IS NULL ORDER BY birth_date; birth_date:ts diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml index 44758422ff415..2487235a2383e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml @@ -293,8 +293,8 @@ --- "Test fields api": - - skip: - version: "- 
8.3.99" + - requires: + cluster_features: "gte_v8.4.0" reason: "Breaking change introduced in 8.4.0" - do: indices.create: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml index 861247350c9f5..485d2c1d99f47 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml @@ -417,10 +417,14 @@ setup: --- Cardinality agg: + - requires: + cluster_features: "gte_v7.7.0" + reason: "constant_keyword was added in 7.7" - skip: - version: " - 7.6.99, 8.9.00 - 8.10.99" - reason: "constant_keyword was added in 7.7, bug introduced in 8.9 and fixed in 8.11" - + known_issues: + - cluster_feature: "gte_v8.9.0" + fixed_by: "gte_v8.11.0" + reason: "bug introduced in 8.9 and fixed in 8.11" - do: indices.create: index: test3 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml index 3033c83af8e33..7ad16faae2314 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml @@ -1,9 +1,9 @@ --- "Test data stream lifecycle usage stats": - - skip: - version: "- 8.10.99" + - requires: + cluster_features: "gte_v8.11.0" reason: "the data stream lifecycle stats were updated to the usage api in 8.11" - features: allowed_warnings + test_runner_features: "allowed_warnings" - do: xpack.usage: {} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml index e505d11cbe137..ba5de9765db17 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml @@ -1,10 +1,9 @@ --- setup: - skip: - version: "all" - reason: "waiting for final decisions on supporting generic expressions on the right https://github.com/elastic/elasticsearch/issues/103599" - - features: allowed_warnings_regex + awaits_fix: "waiting for final decisions on supporting generic expressions on the right https://github.com/elastic/elasticsearch/issues/103599" + - requires: + test_runner_features: allowed_warnings_regex - do: indices.create: index: test diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml index 207b703677661..f576b318c719f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml @@ -17,8 +17,8 @@ setup: feature: disk --- "Usage stats on the health API": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the health api stats were only added to the usage api in 8.7" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml index 28bdf22453c0a..4a1b2379888da 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml @@ -564,8 +564,7 @@ setup: --- "Test delete given model referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ingest.put_pipeline: @@ -594,8 +593,7 @@ setup: --- "Test 
force delete given model referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ingest.put_pipeline: @@ -625,8 +623,7 @@ setup: --- "Test delete given model with alias referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ml.put_trained_model_alias: @@ -659,8 +656,7 @@ setup: --- "Test force delete given model with alias referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/106652" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/106652" - do: ml.put_trained_model_alias: model_alias: "alias-to-a-classification-model" @@ -1117,8 +1113,7 @@ setup: --- "Test put with defer_definition_decompression with invalid definition and no memory estimate": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/94854" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/94854" - do: catch: /Model \[my-regression-model\] inference config type \[classification\] does not support definition target type \[regression\]/ diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml index e307e72d2ca4f..dac7b48617a2f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml @@ -135,8 +135,7 @@ setup: --- "Test rescore with stored model": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: 
"https://github.com/elastic/elasticsearch/issues/80703" - do: search: @@ -171,8 +170,7 @@ setup: --- "Test rescore with stored model and smaller window_size": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: search: @@ -193,8 +191,7 @@ setup: --- "Test rescore with stored model and chained rescorers": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: search: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml index 28a6ad826bc64..7991566bfe818 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml @@ -1,10 +1,10 @@ # This test uses the simple model defined in # TextExpansionQueryIT.java to create the token weights. setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: "sparse_vector field type reintroduced in 8.11" - features: headers + test_runner_features: headers - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml index 5a31af18f8269..50a3fa7e22d58 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml @@ -1,10 +1,10 @@ # This test uses the simple model defined in # TextExpansionQueryIT.java to create the token weights. setup: - - skip: - features: headers - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: "text_expansion query introduced in 8.8" + test_runner_features: "headers" - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 9df3731a09941..2aee382890c56 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -220,8 +220,8 @@ teardown: --- "Test topN functions from profiling-events": - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: "the topN functions API was added in 8.14.0" - do: @@ -251,8 +251,8 @@ teardown: --- "Test topN functions from test-events": - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: "the topN functions API was added in 8.14.0" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index 
edc79a8ebfc9e..db4ea4e8b205d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -29,7 +29,10 @@ teardown: security.delete_role: name: "backwards_role" ignore: 404 - + - do: + security.delete_role: + name: "role_with_description" + ignore: 404 --- "Test put role api": - do: @@ -83,3 +86,21 @@ teardown: - match: { admin_role.metadata.key2: "val2" } - match: { admin_role.indices.0.names.0: "*" } - match: { admin_role.indices.0.privileges.0: "all" } + + - do: + security.put_role: + name: "role_with_description" + body: > + { + "description": "Allows all security-related operations such as CRUD operations on users and roles and cache clearing.", + "cluster": ["manage_security"] + } + - match: { role: { created: true } } + + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.get_role: + name: "role_with_description" + - match: { role_with_description.cluster.0: "manage_security" } + - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index cb81fe483c278..bd40e29d0b675 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -103,8 +103,7 @@ setup: "Test get all jobs": - skip: - version: all - reason: Job ordering isn't guaranteed right now, cannot test + awaits_fix: "Job ordering isn't guaranteed right now, cannot test" - do: headers: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml index 6560c6f470533..dd301c0a29f4f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml @@ -94,6 +94,11 @@ setup: status: job_state: "stopped" + - do: {xpack.usage: {}} + - match: { rollup.available: true } + - match: { rollup.enabled: true } + - match: { rollup.number_of_rollup_jobs: 1 } + --- "Test put_job with existing name": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml index eaa5b8b42a840..b91b9053eac7e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml @@ -127,8 +127,8 @@ setup: --- "Test pinned query with knn query": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml index 317a26cbfef52..0408167cbb656 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml @@ -210,8 +210,7 @@ setup: --- "Test geo_centroid aggregation on geo_shape shapes with grouping": - skip: - version: "all" - reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/95147" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/95147" - do: search: rest_total_hits_as_int: true diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml index 640f5af7b58c7..109e002f0aaa3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml @@ -141,8 +141,7 @@ teardown: --- "Test cat transform stats with batch transform": - skip: - version: "all" - reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/68350" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/68350" - do: transform.put_transform: transform_id: "airline-transform-batch" diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index 0612648078edc..b2dc04c1178e4 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -43,7 +43,7 @@ public class 
LegacyStackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 4; + public static final int REGISTRY_VERSION = 5; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 3930cfe6cd941..30323a1d7d363 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 9; + public static final int REGISTRY_VERSION = 10; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( @@ -107,6 +107,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // Kibana reporting template /////////////////////////////////// public static final String KIBANA_REPORTING_INDEX_TEMPLATE_NAME = ".kibana-reporting"; + public static final String KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME = "kibana-reporting@settings"; public StackTemplateRegistry( Settings nodeSettings, @@ -229,6 +230,13 @@ protected List getLifecyclePolicies() { REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, ADDITIONAL_TEMPLATE_VARIABLES + ), + new IndexTemplateConfig( + KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + "/kibana-reporting@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + ADDITIONAL_TEMPLATE_VARIABLES ) )) { try { diff 
--git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 782fe3b41ae3b..abb2d5765b128 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -429,6 +429,7 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { versions.put(StackTemplateRegistry.METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); + versions.put(StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(versions, nodes); client.setVerifier((action, request, listener) -> { if (action instanceof PutComponentTemplateAction) { @@ -484,6 +485,10 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) ); + versions.put( + StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) + ); ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(versions, nodes); registry.clusterChanged(higherVersionEvent); } diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml index 7ecdc02eacd32..702b5eaafdba2 100644 --- 
a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml @@ -38,8 +38,10 @@ --- "Test painless exceptions are returned when logging a broken response": - skip: - version: "8.7.0 - 8.7.1" - reason: "self-referencing objects were in Painless instead of Mustache in 8.7.0 to 8.7.1" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.7.2" + reason: "self-referencing objects were in Painless instead of Mustache in 8.7.0 to 8.7.1" - do: cluster.health: diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml index eed10bdc179d4..d4964997f8c91 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml @@ -6,9 +6,12 @@ setup: --- "Test put watch with allow no indices": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: @@ -63,9 +66,12 @@ setup: --- "Test put watch with expand wildcards": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: @@ -120,9 +126,12 @@ setup: --- "Test put 
watch with ignore unavailable": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml index 17031abf39e02..e37e78ab772ca 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml @@ -1,8 +1,7 @@ --- "Test watcher usage stats output": - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/65547" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/65547" - do: catch: missing watcher.delete_watch: diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 84c8b0bd95b4f..8a775c7f7d3d8 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.user.User; import 
org.elasticsearch.xpack.test.SecuritySettingsSourceField; @@ -44,6 +43,11 @@ import java.util.function.Consumer; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -420,16 +424,15 @@ private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteDescript return new RoleDescriptor( randomAlphaOfLengthBetween(3, 90), randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3, excludedPrivileges), - RoleDescriptorTests.randomApplicationPrivileges(), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), null, generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(false), + randomRoleDescriptorMetadata(false), Map.of(), - includeRemoteDescriptors ? RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, - includeRemoteDescriptors - ? RoleDescriptorTests.randomRemoteClusterPermissions(randomIntBetween(1, 3)) - : RemoteClusterPermissions.NONE, + includeRemoteDescriptors ? randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, + includeRemoteDescriptors ? 
randomRemoteClusterPermissions(randomIntBetween(1, 3)) : RemoteClusterPermissions.NONE, + null, null ); } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java new file mode 100644 index 0000000000000..4f4ff1d5743ee --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.upgrades; + +import org.apache.http.HttpHost; +import org.elasticsearch.Build; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.XContentTestUtils; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; 
+import static org.hamcrest.Matchers.notNullValue; + +public class RolesBackwardsCompatibilityIT extends AbstractUpgradeTestCase { + + private RestClient oldVersionClient = null; + private RestClient newVersionClient = null; + + public void testCreatingAndUpdatingRoles() throws Exception { + assumeTrue( + "The role description is supported after transport version: " + TransportVersions.SECURITY_ROLE_DESCRIPTION, + minimumTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION) + ); + switch (CLUSTER_TYPE) { + case OLD -> { + // Creating role in "old" cluster should succeed when description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-old-role", initialRole); + updateRole("my-old-role", randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // and fail if we include description + var createException = expectThrows( + Exception.class, + () -> createRole(client(), "my-invalid-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + createException.getMessage(), + containsString("failed to parse role [my-invalid-old-role]. unexpected field [description]") + ); + + RestClient client = client(); + var updateException = expectThrows( + Exception.class, + () -> updateRole(client, "my-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + updateException.getMessage(), + containsString("failed to parse role [my-old-role]. 
unexpected field [description]") + ); + } + case MIXED -> { + try { + this.createClientsByVersion(); + // succeed when role description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-valid-mixed-role", initialRole); + updateRole("my-valid-mixed-role", randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // against old node, fail when description is provided either in update or create request + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(oldVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + allOf(containsString("failed to parse role"), containsString("unexpected field [description]")) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> createRole(oldVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString("failed to parse role [my-invalid-mixed-role]. 
unexpected field [description]") + ); + } + + // and against new node in a mixed cluster we should fail + { + Exception e = expectThrows( + Exception.class, + () -> createRole(newVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(newVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + } finally { + this.closeClientsByVersion(); + } + } + case UPGRADED -> { + // on upgraded cluster which supports new description field + // create/update requests should succeed either way (with or without description) + final String initialRole = randomRoleDescriptorSerialized(randomBoolean()); + createRole(client(), "my-valid-upgraded-role", initialRole); + updateRole( + "my-valid-upgraded-role", + randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(randomBoolean())) + ); + } + } + } + + private void createRole(RestClient client, String roleName, String role) throws IOException { + final Request createRoleRequest = new Request("POST", "_security/role/" + roleName); + createRoleRequest.setJsonEntity(role); + var createRoleResponse = client.performRequest(createRoleRequest); + assertOK(createRoleResponse); + } + + private void updateRole(String roleName, String payload) throws IOException { + updateRole(client(), roleName, payload); + } + + private void updateRole(RestClient client, String roleName, String payload) throws IOException { + final Request updateRequest = new Request("PUT", 
"_security/role/" + roleName); + updateRequest.setJsonEntity(payload); + boolean created = assertOKAndCreateObjectPath(client.performRequest(updateRequest)).evaluate("role.created"); + assertThat(created, equalTo(false)); + } + + private static String randomRoleDescriptorSerialized(boolean includeDescription) { + try { + return XContentTestUtils.convertToXContent( + XContentTestUtils.convertToMap(randomRoleDescriptor(includeDescription)), + XContentType.JSON + ).utf8ToString(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private boolean nodeSupportRoleDescription(Map nodeDetails) { + String nodeVersionString = (String) nodeDetails.get("version"); + TransportVersion transportVersion = getTransportVersionWithFallback( + nodeVersionString, + nodeDetails.get("transport_version"), + () -> TransportVersions.ZERO + ); + + if (transportVersion.equals(TransportVersions.ZERO)) { + // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. + // In that case, the node will be current (upgraded), and remote indices are supported for sure. 
+ var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); + assertTrue(nodeIsCurrent); + return true; + } + return transportVersion.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); + } + + private void createClientsByVersion() throws IOException { + var clientsByCapability = getRestClientByCapability(); + if (clientsByCapability.size() == 2) { + for (Map.Entry<Boolean, RestClient> client : clientsByCapability.entrySet()) { + if (client.getKey() == false) { + oldVersionClient = client.getValue(); + } else { + newVersionClient = client.getValue(); + } + } + assertThat(oldVersionClient, notNullValue()); + assertThat(newVersionClient, notNullValue()); + } else { + fail("expected 2 versions during rolling upgrade but got: " + clientsByCapability.size()); + } + } + + private void closeClientsByVersion() throws IOException { + if (oldVersionClient != null) { + oldVersionClient.close(); + oldVersionClient = null; + } + if (newVersionClient != null) { + newVersionClient.close(); + newVersionClient = null; + } + } + + @SuppressWarnings("unchecked") + private Map<Boolean, RestClient> getRestClientByCapability() throws IOException { + Response response = client().performRequest(new Request("GET", "_nodes")); + assertOK(response); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map<String, Object> nodesAsMap = objectPath.evaluate("nodes"); + Map<Boolean, List<HttpHost>> hostsByCapability = new HashMap<>(); + for (Map.Entry<String, Object> entry : nodesAsMap.entrySet()) { + Map<String, Object> nodeDetails = (Map<String, Object>) entry.getValue(); + var capabilitySupported = nodeSupportRoleDescription(nodeDetails); + Map<String, Object> httpInfo = (Map<String, Object>) nodeDetails.get("http"); + hostsByCapability.computeIfAbsent(capabilitySupported, k -> new ArrayList<>()) + .add(HttpHost.create((String) httpInfo.get("publish_address"))); + } + Map<Boolean, RestClient> clientsByCapability = new HashMap<>(); + for (var entry : hostsByCapability.entrySet()) { + clientsByCapability.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0]))); + } + return clientsByCapability; + } + 
+ private static RoleDescriptor randomRoleDescriptor(boolean includeDescription) { + final Set<String> excludedPrivileges = Set.of( + "cross_cluster_replication", + "cross_cluster_replication_internal", + "manage_data_stream_lifecycle" + ); + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), + null, + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(false), + Map.of(), + null, + null, + null, + includeDescription ? randomAlphaOfLength(20) : null + ); + } +} diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 0330b12663a41..079714ea3a886 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -17,11 +17,11 @@ --- "Test old cluster datafeed with aggs": + - requires: + test_runner_features: "warnings" - skip: - features: warnings #TODO remove skip when master is bumped to 9.0.0 - version: "all" - reason: "If we hit the old node we get a warning. If we hit the new node, we don't" + awaits_fix: "If we hit the old node we get a warning. If we hit the new node, we don't" - do: warnings: - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' 
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 17363d58545c2..3d9e7f3828bc7 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -20,14 +20,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslVerificationMode; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.fixtures.smb.SmbTestContainer; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -190,16 +187,11 @@ public void setupRoleMappings() throws Exception { Map<String, ActionFuture<PutRoleMappingResponse>> futures = Maps.newLinkedHashMapWithExpectedSize(content.size()); for (int i = 0; i < content.size(); i++) { final String name = "external_" + i; - final PutRoleMappingRequestBuilder builder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - new BytesArray(content.get(i)), - XContentType.JSON - ) - ) { - builder = new PutRoleMappingRequestBuilder(client()).source(name, parser); - } + final 
PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source( + name, + new BytesArray(content.get(i)), + XContentType.JSON + ); futures.put(name, builder.execute()); } for (String mappingName : futures.keySet()) {