diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/KlabCLI.java b/klab.cli/src/main/java/org/integratedmodelling/cli/KlabCLI.java index 20f1cb63b..a74e2e6f0 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/KlabCLI.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/KlabCLI.java @@ -1,5 +1,11 @@ package org.integratedmodelling.cli; +import java.io.*; +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; +import java.util.function.Supplier; import org.integratedmodelling.cli.views.CLIObservationView; import org.integratedmodelling.cli.views.CLIReasonerView; import org.integratedmodelling.cli.views.CLIResourcesView; @@ -10,7 +16,6 @@ import org.integratedmodelling.klab.api.engine.Engine; import org.integratedmodelling.klab.api.exceptions.KlabIOException; import org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException; -import org.integratedmodelling.klab.api.knowledge.Urn; import org.integratedmodelling.klab.api.scope.ContextScope; import org.integratedmodelling.klab.api.scope.Scope; import org.integratedmodelling.klab.api.scope.Scope.Status; @@ -40,881 +45,1021 @@ import picocli.shell.jline3.PicocliCommands; import picocli.shell.jline3.PicocliCommands.PicocliCommandsFactory; -import java.io.*; -import java.net.URI; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.*; -import java.util.function.Supplier; - /** * Command line for the next k.LAB. Provides a Modeler controlled by command line instructions. - *

- * Commands can be bare Runnables or the specialized FunctionalCommand, which manages a stack of values that - * the command execution can push. Any pushed values are matched into a global stack, and they can be referred - * as $ (equivalent to $0) or $n (n = depth into stack) by commands that are prepared to receive them. - * Commands that push variables into the stack should notify that to the user. - *

- * TESTING SETUP + * + *

Commands can be bare Runnables or the specialized FunctionalCommand, which manages a stack of
+ * values that the command execution can push. Any pushed values are matched into a global stack,
+ * and they can be referred to as $ (equivalent to $0) or $n (n = depth into stack) by commands
+ * that are prepared to receive them. Commands that push values onto the stack should notify the
+ * user that they have done so.
+ *
+ *
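+ * <p>Hypothetical illustration of the stack convention (a sketch only: the command names below
+ * are invented for this example and are not necessarily actual CLI commands):
+ *
+ * <pre>{@code
+ * k.LAB> resolve some.resource.urn    // a command that pushes its result onto the stack
+ * k.LAB> describe $                   // $ (same as $0) refers to the value just pushed
+ * k.LAB> compare $0 $1                // $1 refers to the value pushed one step earlier
+ * }</pre>
+ *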

TESTING SETUP * ============================================================================================== - *

- * Run in terminal from the project dir after "mvn install" as java -cp + * + *

Run in terminal from the project dir after "mvn install" as java -cp * "target/kcli-0.11.0-SNAPSHOT.jar;target/lib/*" org.integratedmodelling.kcli.KlabCLI * . - *

- * A useful alias for bash is * - * alias klab="java -cp "target/kcli-0.11.0-SNAPSHOT.jar;target/lib/*" + *

A useful alias for bash is + * alias klab="java -cp "target/kcli-0.11.0-SNAPSHOT.jar;target/lib/*" * -Xmx4096M -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000 * org.integratedmodelling.kcli.KlabCLI" - *

- * TODO revise around the {@link org.integratedmodelling.klab.api.view.modeler.Modeler} and provide - * CLI-versions of each view instead of making up commands. Should be - *

- * resources, services, statistics, report, distribution, knowledge, events, debug and context. + * + *

TODO revise around the {@link org.integratedmodelling.klab.api.view.modeler.Modeler} and + * provide CLI-versions of each view instead of making up commands. Should be + * + *

resources, services, statistics, report, distribution, knowledge, events, debug and context. */ public enum KlabCLI { - - INSTANCE; - - private String prompt = "k.LAB> "; - private ModelerImpl modeler; - private LineReader reader; - private CLIStartupOptions options; - private CommandLine commandLine; - - public Engine engine() { - return modeler.engine(); + INSTANCE; + + private String prompt = "k.LAB> "; + private ModelerImpl modeler; + private LineReader reader; + private CLIStartupOptions options; + private CommandLine commandLine; + + public Engine engine() { + return modeler.engine(); + } + + public UserScope user() { + return modeler.user(); + } + + public ModelerImpl modeler() { + return this.modeler; + } + + private String getContextPrompt() { + String ret = null; + if (modeler.getCurrentContext() != null) { + ret = modeler.getCurrentSession().getName() + "/" + modeler.getCurrentContext().getName(); + if (modeler.getCurrentContext().getContextObservation() != null) { + ret += "/" + modeler.getCurrentContext().getContextObservation().getName(); + } + if (modeler.getCurrentContext().getObserver() != null) { + ret = modeler.getCurrentContext().getObserver().getName() + "@" + ret; + } + } else if (modeler.getCurrentSession() != null) { + ret = modeler.getCurrentSession().getName(); } - - public UserScope user() { - return modeler.user(); + return ret; + } + + public void exportWithSchema(KlabService service, List arguments) {} + + public void importWithSchema(KlabService service, String suggestedUrn, List arguments) { + + ResourceTransport.Schema schema = null; + String result = null; + if (arguments == null || arguments.isEmpty()) { + schema = chooseSchemaInteractively(service.capabilities(user()).getImportSchemata()); + if (schema != null) { + arguments = defineInteractively(service, schema); + arguments.addFirst(""); + } + } else { + schema = + ResourceTransport.INSTANCE.findSchema( + arguments.getFirst(), service.capabilities(user()).getImportSchemata(), user()); } - public ModelerImpl modeler() { - return this.modeler; - } + if (schema == null) { + commandLine.getErr().println("No schema found with ID " + arguments.getFirst()); + } else if (schema.getType() == ResourceTransport.Schema.Type.STREAM && arguments.size() == 2) { - private String getContextPrompt() { - String ret = null; - if (modeler.getCurrentContext() != null) { - ret = modeler.getCurrentSession().getName() + "/" + modeler.getCurrentContext().getName(); - if (modeler.getCurrentContext().getContextObservation() != null) { - ret += "/" + modeler.getCurrentContext().getContextObservation().getName(); - } - if (modeler.getCurrentContext().getObserver() != null) { - ret = modeler.getCurrentContext().getObserver().getName() + "@" + ret; - } - } else if (modeler.getCurrentSession() != null) { - ret = modeler.getCurrentSession().getName(); + if (arguments.get(1).contains("://")) { + try { + var url = new URI(arguments.get(1)).toURL(); + result = service.importAsset(schema, schema.asset(url), suggestedUrn, user()); + } catch (Exception e) { + commandLine + .getErr() + .println( + "Import failed with exception:" + + org.integratedmodelling.klab.api.utils.Utils.Exceptions.stackTrace(e)); + return; } - return ret; - } - - public void exportWithSchema(KlabService service, List arguments) { - - } - - public void importWithSchema(KlabService service, String suggestedUrn, List arguments) { - - ResourceTransport.Schema schema = null; - String result = null; - if (arguments == null || arguments.isEmpty()) { - schema = 
chooseSchemaInteractively(service.capabilities(user()).getImportSchemata()); - if (schema != null) { - arguments = defineInteractively(service, schema); - arguments.addFirst(""); - } - } else { - schema = ResourceTransport.INSTANCE.findSchema(arguments.getFirst(), - service.capabilities(user()).getImportSchemata(), user()); + } else { + File file = new File(arguments.get(1)); + if (file.exists()) { + try { + result = service.importAsset(schema, schema.asset(file), suggestedUrn, user()); + } catch (Exception e) { + commandLine + .getErr() + .println("Import failed with exception:" + Utils.Exceptions.stackTrace(e)); + return; + } } - - if (schema == null) { - commandLine.getErr().println("No schema found with ID " + arguments.getFirst()); - } else if (schema.getType() == ResourceTransport.Schema.Type.STREAM && arguments.size() == 2) { - - if (arguments.get(1).contains("://")) { - try { - var url = new URI(arguments.get(1)).toURL(); - result = service.importAsset(schema, schema.asset(url), suggestedUrn, user()); - } catch (Exception e) { - commandLine.getErr().println("Import failed with exception:" + org.integratedmodelling.klab.api.utils.Utils.Exceptions.stackTrace(e)); - return; - } - } else { - File file = new File(arguments.get(1)); - if (file.exists()) { - try { - result = service.importAsset(schema, schema.asset(file), suggestedUrn, user()); - } catch (Exception e) { - commandLine.getErr().println("Import failed with exception:" + Utils.Exceptions.stackTrace(e)); - return; - } - } - } - } else { - if (schema.getType() == ResourceTransport.Schema.Type.PROPERTIES && arguments.size() == schema.getProperties().size() + 1) { - var params = org.integratedmodelling.klab.api.collections.Parameters.create(); - int i = 0; - for (var property : schema.getProperties().keySet()) { - var descriptor = schema.getProperties().get(property); - var argValue = arguments.get(i + 1); - if (!"_".equals(argValue)) { - params.put(property, - Utils.Data.convertValue(argValue, - descriptor.type())); - } else if (descriptor.defaultValue() != null) { - params.put(property, descriptor.defaultValue()); - } else if (!descriptor.optional()) { - commandLine.getErr().println("Property " + property + " is mandatory and must be " + - "supplied"); - return; - } - i++; - } - try { - result = service.importAsset(schema, schema.asset(params), suggestedUrn, user()); - } catch (Throwable t) { - commandLine.getErr().println("Import failed with exception:" + Utils.Exceptions.stackTrace(t)); - return; - } - } else { - commandLine.getErr().println("Import call with schema ID " + arguments.getFirst() + " " + - "failed: argument mismatch"); - return; - } + } + } else { + if (schema.getType() == ResourceTransport.Schema.Type.PROPERTIES + && arguments.size() == schema.getProperties().size() + 1) { + var params = org.integratedmodelling.klab.api.collections.Parameters.create(); + int i = 0; + for (var property : schema.getProperties().keySet()) { + var descriptor = schema.getProperties().get(property); + var argValue = arguments.get(i + 1); + if (!"_".equals(argValue)) { + params.put(property, Utils.Data.convertValue(argValue, descriptor.type())); + } else if (descriptor.defaultValue() != null) { + params.put(property, descriptor.defaultValue()); + } else if (!descriptor.optional()) { + commandLine + .getErr() + .println("Property " + property + " is mandatory and must be " + "supplied"); + return; + } + i++; } - - if (result == null) { - commandLine.getErr().println("Import was rejected by service (URN is null)"); - } else { - 
commandLine.getOut().println("Import to service succeeded: URN is " + result); + try { + result = service.importAsset(schema, schema.asset(params), suggestedUrn, user()); + } catch (Throwable t) { + commandLine + .getErr() + .println("Import failed with exception:" + Utils.Exceptions.stackTrace(t)); + return; } - + } else { + commandLine + .getErr() + .println( + "Import call with schema ID " + + arguments.getFirst() + + " " + + "failed: argument mismatch"); + return; + } } - public ResourceTransport.Schema chooseSchemaInteractively - (Map> schemata) { - - List> choices = new ArrayList<>(); - int n = 1; - commandLine.getOut().println(Ansi.AUTO.string("Choose a transport schema:")); - for (String key : schemata.keySet()) { - for (var schema : schemata.get(key)) { - commandLine.getOut().println(Ansi.AUTO.string(" " + (n++) + ": @|green " + schema.getSchemaId() + "|@ " + - "@|yellow " + schema.getDescription() + "|@")); - choices.add(Pair.of(key, schema)); - } - } - var line = reader.readLine(Ansi.AUTO.string("@|yellow Schema #:|@ "), "", (MaskingCallback) null, + if (result == null) { + commandLine.getErr().println("Import was rejected by service (URN is null)"); + } else { + commandLine.getOut().println("Import to service succeeded: URN is " + result); + } + } + + public ResourceTransport.Schema chooseSchemaInteractively( + Map> schemata) { + + List> choices = new ArrayList<>(); + int n = 1; + commandLine.getOut().println(Ansi.AUTO.string("Choose a transport schema:")); + for (String key : schemata.keySet()) { + for (var schema : schemata.get(key)) { + commandLine + .getOut() + .println( + Ansi.AUTO.string( + " " + + (n++) + + ": @|green " + + schema.getSchemaId() + + "|@ " + + "@|yellow " + + schema.getDescription() + + "|@")); + choices.add(Pair.of(key, schema)); + } + } + var line = + reader.readLine( + Ansi.AUTO.string("@|yellow Schema #:|@ "), "", (MaskingCallback) null, null); + if (Utils.Numbers.encodesInteger(line.trim())) { + var index = Integer.parseInt(line.trim()) - 1; + if (index >= 0 && index < choices.size()) { + return choices.get(index).getSecond(); + } + } + return null; + } + + public List defineInteractively(KlabService service, ResourceTransport.Schema schema) { + + List ret = new ArrayList<>(); + + if (schema.getType() == ResourceTransport.Schema.Type.STREAM) { + var line = + reader.readLine( + Ansi.AUTO.string("@|yellow Enter file path or URL:|@ "), + "", + (MaskingCallback) null, + null); + if (line != null && !line.trim().isEmpty()) { + ret.add(line.trim()); + } + } else if (schema.getType() == ResourceTransport.Schema.Type.PROPERTIES) { + for (var property : schema.getProperties().values()) { + var line = + reader.readLine( + Ansi.AUTO.string( + "Value for @|yellow " + + property.name() + + "|@ " + + "[" + + (property.defaultValue() == null + ? (property.optional() ? 
"optional" : "mandatory") + : property.defaultValue()) + + "]: "), + "", + (MaskingCallback) null, null); - if (Utils.Numbers.encodesInteger(line.trim())) { - var index = Integer.parseInt(line.trim()) - 1; - if (index >= 0 && index < choices.size()) { - return choices.get(index).getSecond(); - } + if (line.trim().isEmpty()) { + ret.add("_"); + } else { + ret.add(line.trim()); } - return null; + } } - - public List defineInteractively(KlabService service, ResourceTransport.Schema schema) { - - List ret = new ArrayList<>(); - - if (schema.getType() == ResourceTransport.Schema.Type.STREAM) { - var line = reader.readLine(Ansi.AUTO.string("@|yellow Enter file path or URL:|@ "), "", - (MaskingCallback) null, null); - if (line != null && !line.trim().isEmpty()) { - ret.add(line.trim()); - } - } else if (schema.getType() == ResourceTransport.Schema.Type.PROPERTIES) { - for (var property : schema.getProperties().values()) { - var line = reader.readLine(Ansi.AUTO.string("Value for @|yellow " + property.name() + - "|@ " + - "[" + (property.defaultValue() == null ? (property.optional() ? "optional" - : - "mandatory") : - property.defaultValue()) + "]: "), - "", (MaskingCallback) null, null); - if (line.trim().isEmpty()) { - ret.add("_"); - } else { - ret.add(line.trim()); - } - } - } - return ret; + return ret; + } + + public T service(String service, Class serviceClass) { + if (service == null || "local".equals(service)) { + return user().getService(serviceClass); + } // TODO + return null; + } + + public boolean confirm(String prompt) { + commandLine.getOut().println(Ansi.AUTO.string("@|yellow " + prompt + "|@ (Y/n)?")); + var line = + reader.readLine(Ansi.AUTO.string("@|cyan Y/n:|@ "), "", (MaskingCallback) null, null); + return line == null || line.isEmpty() || line.trim().equalsIgnoreCase("y"); + } + + /** Top-level command that just prints help. */ + @Command( + name = "", + description = { + "k.LAB interactive shell with completion and autosuggestions. 
" + + "Hit @|magenta |@ to see available commands.", + "Hit" + " " + "@|magenta ALT-S|@ to toggle tailtips.", + "" + }, + footer = {"", "Press Ctrl-D to exit."}, + subcommands = { + Auth.class, + Expressions.class, + CLIReasonerView.class, /*Report.class, Resolver + .class,*/ + Shutdown.class, + Credentials.class, + CLIServicesView.class, + Run.class, + PicocliCommands.ClearScreen.class, + CommandLine.HelpCommand.class, + Set.class, /*Session.class, + */ + CLIObservationView.class, + CLIResourcesView.class, + Components.class, + Test.class, + Run.Alias.class, + Run.Unalias.class + }) + static class CliCommands implements Runnable { + + PrintWriter out; + + public void setReader(LineReader reader) { + out = reader.getTerminal().writer(); } - public T service(String service, Class serviceClass) { - if (service == null || "local".equals(service)) { - return user().getService(serviceClass); - } // TODO - return null; + public void run() { + out.println(new CommandLine(this).getUsageMessage()); } - - public boolean confirm(String prompt) { - commandLine.getOut().println(Ansi.AUTO.string("@|yellow " + prompt + "|@ (Y/n)?")); - var line = reader.readLine(Ansi.AUTO.string("@|cyan Y/n:|@ "), "", (MaskingCallback) null, null); - return line == null || line.isEmpty() || line.trim().equalsIgnoreCase("y"); + } + + @Command( + name = "run", + mixinStandardHelpOptions = true, + description = { + "Run scripts, test cases and " + "applications.", + "Uses autocompletion for " + "behavior " + "and " + "test case " + "names.", + "" + }, + subcommands = {Run.List.class, Run.Purge.class}) + static class Run /* extends Monitor */ implements Runnable { + + java.util.Set running = new LinkedHashSet<>(); + + static Map aliases = new LinkedHashMap<>(); + + @Spec CommandSpec commandSpec; + + @Option( + names = {"-s", "--synchronous"}, + defaultValue = "false", + description = { + "Run in synchronous" + + " mode," + + " " + + "returning " + + "to the" + + " " + + "prompt" + + " " + + "when " + + "the " + + "script" + + " has" + + " finished " + + "running." + }, + required = false) + boolean synchronous; + + @Parameters( + description = { + "The full name of one or more script, test case or application.", + "If " + "not present locally, resolve through the k.LAB network." + }) + java.util.List scriptNames = new ArrayList<>(); + + public Run() {} + + public static void loadAliases() { + File aliasFile = + new File( + System.getProperty("user.home") + + File.separator + + ".klab" + + File.separator + + "kcli" + + File.separator + + "aliases.txt"); + if (!aliasFile.exists()) { + // Utils.Files.touch(aliasFile); + } + try (InputStream input = new FileInputStream(aliasFile)) { + Properties properties = new Properties(); + properties.load(input); + for (String property : properties.stringPropertyNames()) { + Run.aliases.put(property, properties.getProperty(property)); + } + } catch (Exception e) { + throw new KlabIOException(e); + } } - /** - * Top-level command that just prints help. - */ - @Command(name = "", description = {"k.LAB interactive shell with completion and autosuggestions. 
" + - "Hit @|magenta |@ to see available commands.", "Hit" + - " " + - "@|magenta ALT-S|@ to toggle tailtips.", ""}, footer = {"", - "Press Ctrl-D to exit."}, - subcommands = {Auth.class, Expressions.class, CLIReasonerView.class, /*Report.class, Resolver - .class,*/ - Shutdown.class, Credentials.class, CLIServicesView.class, Run.class, - PicocliCommands.ClearScreen.class, - CommandLine.HelpCommand.class, Set.class,/*Session.class, - */CLIObservationView.class, - CLIResourcesView.class, Components.class, Test.class, Run.Alias.class, - Run.Unalias.class}) - static class CliCommands implements Runnable { - - PrintWriter out; - - public void setReader(LineReader reader) { - out = reader.getTerminal().writer(); + public static void storeAliases() { + File aliasFile = + new File( + System.getProperty("user.home") + + File.separator + + ".klab" + + File.separator + + "kcli" + + File.separator + + "aliases.txt"); + try (OutputStream output = new FileOutputStream(aliasFile)) { + Properties properties = new Properties(); + for (String key : Run.aliases.keySet()) { + properties.setProperty(key, Run.aliases.get(key)); } + properties.store(output, "k.CLI alias file"); + } catch (Exception e) { + throw new KlabIOException(e); + } + } - public void run() { - out.println(new CommandLine(this).getUsageMessage()); + @Override + public void run() { + + PrintWriter out = commandSpec.commandLine().getOut(); + + if (scriptNames.isEmpty()) { + list(); + } else { + + for (String scriptName : scriptNames) { + + // KActorsBehavior behavior = Engine.INSTANCE.getCurrentUser(true, + // null) + // .getService(ResourcesService.class) + // .resolveBehavior(scriptName, Engine.INSTANCE + // .getCurrentUser()); + // + // if (behavior == null) { + // out.println(Ansi.AUTO.string("Behavior @|red " + + // scriptName + + // "|@ unknown or not " + + // "available")); + // } else { + // out.println(Ansi.AUTO.string("Running @|green " + scriptName + // + "|@...")); + // running.add(Engine.INSTANCE.getCurrentUser().run(scriptName, + // behavior.getType())); + // } } + } } - @Command(name = "run", mixinStandardHelpOptions = true, description = - {"Run scripts, test cases and " + "applications.", "Uses autocompletion for " + "behavior " + - "and " + - "test case " + "names.", ""}, subcommands = {Run.List.class, Run.Purge.class}) - static class Run /* extends Monitor */ implements Runnable { - - java.util.Set running = new LinkedHashSet<>(); - - static Map aliases = new LinkedHashMap<>(); - - @Spec - CommandSpec commandSpec; - - @Option(names = {"-s", "--synchronous"}, defaultValue = "false", description = {"Run in synchronous" + - " mode," + - " " + - "returning " + - "to the" + - " " + - "prompt" + - " " + - "when " + - "the " + - "script" + - " has" + - " finished " + - "running."} - , required = false) - boolean synchronous; - - @Parameters(description = {"The full name of one or more script, test case or application.", - "If " + - "not present locally, resolve through the k.LAB network."}) - java.util.List scriptNames = new ArrayList<>(); - - public Run() { - } + public void list() { - public static void loadAliases() { - File aliasFile = - new File(System.getProperty("user.home") + File.separator + ".klab" + File.separator + - "kcli" + File.separator + "aliases.txt"); - if (!aliasFile.exists()) { - // Utils.Files.touch(aliasFile); - } - try (InputStream input = new FileInputStream(aliasFile)) { - Properties properties = new Properties(); - properties.load(input); - for (String property : properties.stringPropertyNames()) { - 
Run.aliases.put(property, properties.getProperty(property)); - } - } catch (Exception e) { - throw new KlabIOException(e); - } - } + int n = 1; + for (SessionScope scope : running) { + commandSpec + .commandLine() + .getOut() + .println(" " + n++ + ". " + scope.getName() + " [" + scope.getStatus() + "]"); + } + } - public static void storeAliases() { - File aliasFile = - new File(System.getProperty("user.home") + File.separator + ".klab" + File.separator + - "kcli" + File.separator + "aliases.txt"); - try (OutputStream output = new FileOutputStream(aliasFile)) { - Properties properties = new Properties(); - for (String key : Run.aliases.keySet()) { - properties.setProperty(key, Run.aliases.get(key)); - } - properties.store(output, "k.CLI alias file"); - } catch (Exception e) { - throw new KlabIOException(e); - } - } + @Command( + name = "list", + mixinStandardHelpOptions = true, + description = {"List all running " + "behaviors" + "."}) + static class List implements Runnable { - @Override - public void run() { + @ParentCommand Run parent; - PrintWriter out = commandSpec.commandLine().getOut(); - - if (scriptNames.isEmpty()) { - list(); - } else { - - for (String scriptName : scriptNames) { - - // KActorsBehavior behavior = Engine.INSTANCE.getCurrentUser(true, - // null) - // .getService(ResourcesService.class) - // .resolveBehavior(scriptName, Engine.INSTANCE - // .getCurrentUser()); - // - // if (behavior == null) { - // out.println(Ansi.AUTO.string("Behavior @|red " + - // scriptName + - // "|@ unknown or not " + - // "available")); - // } else { - // out.println(Ansi.AUTO.string("Running @|green " + scriptName - // + "|@...")); - // running.add(Engine.INSTANCE.getCurrentUser().run(scriptName, - // behavior.getType())); - // } - } - } - } + @Override + public void run() { + parent.list(); + } + } - public void list() { + @Command( + name = "alias", + mixinStandardHelpOptions = true, + description = {"Define an alias for a " + "command.", "Use @x to store option -x"}, + subcommands = {Alias.List.class, Alias.Clear.class}) + static class Alias implements Runnable { - int n = 1; - for (SessionScope scope : running) { - commandSpec.commandLine().getOut().println(" " + n++ + ". 
" + scope.getName() + " [" + scope.getStatus() + "]"); - } + @Command( + name = "list", + mixinStandardHelpOptions = true, + description = {"List all aliases."}) + static class List implements Runnable { + @Spec CommandSpec commandSpec; + @Override + public void run() { + for (String alias : Run.aliases.keySet()) { + commandSpec + .commandLine() + .getOut() + .println( + Ansi.AUTO.string( + "@|bold " + alias + "|@: " + "@|green " + Run.aliases.get(alias) + "|@")); + } } + } - @Command(name = "list", mixinStandardHelpOptions = true, description = {"List all running " + - "behaviors" + - "."}) - static class List implements Runnable { - - @ParentCommand - Run parent; - - @Override - public void run() { - parent.list(); - } + @Command( + name = "clear", + mixinStandardHelpOptions = true, + description = {"Remove all aliases."}) + static class Clear implements Runnable { + @Spec CommandSpec commandSpec; + @Override + public void run() { + int nal = Run.aliases.size(); + Run.aliases.clear(); + Run.storeAliases(); + commandSpec.commandLine().getOut().println(nal + " aliases removed"); } - - @Command(name = "alias", mixinStandardHelpOptions = true, description = - {"Define an alias for a " + "command.", "Use @x to store option -x"}, subcommands = - {Alias.List.class, Alias.Clear.class}) - static class Alias implements Runnable { - - @Command(name = "list", mixinStandardHelpOptions = true, description = {"List all aliases."}) - static class List implements Runnable { - @Spec - CommandSpec commandSpec; - - @Override - public void run() { - for (String alias : Run.aliases.keySet()) { - commandSpec.commandLine().getOut().println(Ansi.AUTO.string("@|bold " + alias + - "|@: " + "@|green " + Run.aliases.get(alias) + "|@")); - } - } - } - - @Command(name = "clear", mixinStandardHelpOptions = true, description = {"Remove all aliases."}) - static class Clear implements Runnable { - @Spec - CommandSpec commandSpec; - - @Override - public void run() { - int nal = Run.aliases.size(); - Run.aliases.clear(); - Run.storeAliases(); - commandSpec.commandLine().getOut().println(nal + " aliases removed"); - } - } - - @Parameters(defaultValue = Parameters.NULL_VALUE) - java.util.List arguments; - @Spec - CommandSpec commandSpec; - - @Override - public void run() { - - if (arguments == null || arguments.size() == 0) { - for (String alias : Run.aliases.keySet()) { - commandSpec.commandLine().getOut().println(Ansi.AUTO.string("@|bold " + alias + - "|@: " + "@|green " + Run.aliases.get(alias) + "|@")); - } - return; - } - - if (arguments.size() < 2) { - throw new KlabIllegalStateException("Must name an alias and its value"); - } - String alias = arguments.get(0); - for (int i = 1; i < arguments.size(); i++) { - if (arguments.get(i).startsWith("@")) { - arguments.set(i, "-" + arguments.get(i).substring(1)); - } - } - String value = Utils.Strings.join(arguments.subList(1, arguments.size()), " "); - Run.aliases.put(alias, value); - Run.storeAliases(); - } + } + + @Parameters(defaultValue = Parameters.NULL_VALUE) + java.util.List arguments; + + @Spec CommandSpec commandSpec; + + @Override + public void run() { + + if (arguments == null || arguments.size() == 0) { + for (String alias : Run.aliases.keySet()) { + commandSpec + .commandLine() + .getOut() + .println( + Ansi.AUTO.string( + "@|bold " + alias + "|@: " + "@|green " + Run.aliases.get(alias) + "|@")); + } + return; } - @Command(name = "unalias", mixinStandardHelpOptions = true, description = {"Remove a command " + - "alias."}) - static class Unalias implements Runnable 
{ - - @Parameters - String alias; - - @Override - public void run() { - Run.aliases.remove(alias); - Run.storeAliases(); - } - + if (arguments.size() < 2) { + throw new KlabIllegalStateException("Must name an alias and its value"); } - - @Command(name = "purge", mixinStandardHelpOptions = true, description = {"Remove finished or " + - "aborted " + - "behaviors" + - " from the " + - "list."}) - static class Purge implements Runnable { - - @Parameters(description = {"The numeric ID of the scripts we want to purge. No argument " + - "removes" + - " all that have " + "finished.", "Run \"run list\" to " + - "know " + - "the IDs."}) - java.util.List appIds = new ArrayList<>(); - - @ParentCommand - Run parent; - - @Override - public void run() { - if (appIds.isEmpty()) { - java.util.Set removed = new HashSet<>(); - for (SessionScope s : parent.running) { - if (s.getStatus() != Status.STARTED && s.getStatus() != Status.WAITING) { - s.close(); - removed.add(s); - } - } - parent.running.removeAll(removed); - } else { - java.util.List scopes = new ArrayList<>(parent.running); - for (int appId : appIds) { - SessionScope s = scopes.get(appId + 1); - s.close(); - parent.running.remove(s); - } - } - parent.list(); - } + String alias = arguments.get(0); + for (int i = 1; i < arguments.size(); i++) { + if (arguments.get(i).startsWith("@")) { + arguments.set(i, "-" + arguments.get(i).substring(1)); + } } + String value = Utils.Strings.join(arguments.subList(1, arguments.size()), " "); + Run.aliases.put(alias, value); + Run.storeAliases(); + } } - public static void main(String[] args) { - // AnsiConsole.systemInstall(); - - INSTANCE.options = CLIStartupOptions.create(args); - - try { - - // create the modeler - INSTANCE.modeler = new CommandLineModeler(); - // Configure messages for CLI use - INSTANCE.modeler.setOption(ModelerImpl.Option.UseAnsiEscapeSequences, true); - - - Supplier workDir = () -> Paths.get(System.getProperty("user.home") + File.separator + - ".klab" + File.separator + "kcli"); - - // jline built-in commands - workDir.get().toFile().mkdirs(); - ConfigurationPath configPath = new ConfigurationPath(workDir.get(), workDir.get()); - Builtins builtins = new Builtins(workDir, configPath, null); - builtins.rename(Builtins.Command.TTOP, "top"); - builtins.alias("zle", "widget"); - builtins.alias("bindkey", "keymap"); - - // picocli - CliCommands commands = new CliCommands(); - PicocliCommandsFactory factory = new PicocliCommandsFactory(); - INSTANCE.commandLine = new CommandLine(commands, factory); - PicocliCommands picocliCommands = new PicocliCommands(INSTANCE.commandLine); - File historyFile = new File(Configuration.INSTANCE.getDataPath() + File.separator + "kcli" + - ".history"); - Parser parser = new DefaultParser(); - try (Terminal terminal = TerminalBuilder.builder().build()) { - - SystemRegistry systemRegistry = new SystemRegistryImpl(parser, terminal, workDir, null); - systemRegistry.setCommandRegistries(builtins, picocliCommands); - systemRegistry.register("help", picocliCommands); - KlabCompleter completer = new KlabCompleter(systemRegistry.completer()); - History history = new DefaultHistory(); - INSTANCE.reader = - LineReaderBuilder.builder().terminal(terminal).completer(completer).parser(parser).variable(LineReader.LIST_MAX, 50) // candidates - .history(history).build(); - - builtins.setLineReader(INSTANCE.reader); - commands.setReader(INSTANCE.reader); - factory.setTerminal(terminal); - history.attach(INSTANCE.reader); - - TailTipWidgets widgets = new TailTipWidgets(INSTANCE.reader, 
- systemRegistry::commandDescription, 5, - TailTipWidgets.TipType.COMPLETER); - widgets.enable(); - KeyMap keyMap = INSTANCE.reader.getKeyMaps().get("main"); - keyMap.bind(new Reference("tailtip-toggle"), KeyMap.alt("s")); - - /** - * If we have a command, run it and exit - * FIXME use options field - */ - if (args != null && args.length > 0) { - String line = Utils.Strings.join(args, ' '); - try { - systemRegistry.execute(line); - } catch (Throwable t) { - t.printStackTrace(); - System.exit(0xff); - } - System.exit(0); - } + @Command( + name = "unalias", + mixinStandardHelpOptions = true, + description = {"Remove a command " + "alias."}) + static class Unalias implements Runnable { - if (historyFile.exists()) { - history.read(historyFile.toPath(), true); - } + @Parameters String alias; - Run.loadAliases(); - - // boot the engine. This will schedule processes so it wont'delay startup. - INSTANCE.modeler.boot(); - - // start the shell and process input until the user quits with Ctrl-D - String line; - while (true) { - try { - - systemRegistry.cleanUp(); - line = INSTANCE.reader.readLine(INSTANCE.prompt, INSTANCE.getContextPrompt(), - (MaskingCallback) null, null); - completer.resetSemanticSearch(); - boolean aliased = false; - - /* - * Use <, >, .. to move through context observations, @ to set/reset the - * observer and - * ./? to inquire about the current context in detail. The right prompt - * summarizes - * the current context focus. - */ - if (line.trim().startsWith(".") || line.trim().startsWith("<") || line.trim().startsWith("@") || line.trim().startsWith(">") || line.trim().startsWith("?")) { - INSTANCE.setFocalScope(line.trim()); - continue; - } else if (line.trim().startsWith("-")) { - if (line.trim().equals("-") && history.size() > 0) { - line = history.get(history.last() - 1); - aliased = true; - } else if (org.integratedmodelling.klab.api.utils.Utils.Numbers.encodesInteger(line.trim().substring(1))) { - int n = Integer.parseInt(line.trim().substring(1)); - if (history.size() > n) { - line = history.get(history.last() - n); - aliased = true; - } - } - } else if (Run.aliases.containsKey(line.trim())) { - line = Run.aliases.get(line.trim()); - } - - if (aliased) { - // print the actual line in grey + italic - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string("@|gray" + line + - "|@")); - } - - systemRegistry.execute(line); - - if (!aliased) { - history.write(historyFile.toPath(), false); - } - - } catch (UserInterruptException e) { - // TODO send interrupt signal to running tasks - } catch (EndOfFileException e) { - System.exit(0); - } catch (Exception e) { - systemRegistry.trace(e); - } - } - } - } catch (Throwable t) { - t.printStackTrace(); - } finally { - // AnsiConsole.systemUninstall(); - } + @Override + public void run() { + Run.aliases.remove(alias); + Run.storeAliases(); + } } - - /** - * Parse the string for context navigation operators and set the current context to whatever has been - * asked for. - * - * @param line - */ - private void setFocalScope(String line) { - - if (line.trim().equals(".")) { - - printContextInfo(); - - } else if (line.trim().equals("..") || line.trim().equals("<")) { - - Scope scope = modeler == null ? null : (modeler.getCurrentSession() == null ? modeler.user() : - (modeler.getCurrentContext() == null ? 
- modeler.getCurrentSession() : - modeler.getCurrentContext())); - // context setting - if (scope == null) { - INSTANCE.commandLine.getOut().println("No current scope"); - } else if (scope.getType() == Scope.Type.CONTEXT) { - - var parent = scope.getParentScope(); - if (parent != null && parent.getType() == Scope.Type.CONTEXT) { - modeler.setCurrentContext((ContextScope) parent); - } else if (parent != null && parent.getType() == Scope.Type.SESSION) { - modeler.setCurrentContext(null); - } - printContextInfo(); - - } else if (scope.getType() == Scope.Type.SESSION) { - modeler.setCurrentContext(null); - modeler.setCurrentSession(null); - printContextInfo(); + @Command( + name = "purge", + mixinStandardHelpOptions = true, + description = {"Remove finished or " + "aborted " + "behaviors" + " from the " + "list."}) + static class Purge implements Runnable { + + @Parameters( + description = { + "The numeric ID of the scripts we want to purge. No argument " + + "removes" + + " all that have " + + "finished.", + "Run \"run list\" to " + "know " + "the IDs." + }) + java.util.List appIds = new ArrayList<>(); + + @ParentCommand Run parent; + + @Override + public void run() { + if (appIds.isEmpty()) { + java.util.Set removed = new HashSet<>(); + for (SessionScope s : parent.running) { + if (s.getStatus() != Status.STARTED && s.getStatus() != Status.WAITING) { + s.close(); + removed.add(s); } - - } else if (line.startsWith("<<")) { - this.modeler.setCurrentContext(null); - this.modeler.setCurrentSession(null); - } else if (line.startsWith("<")) { - // must have something after the < - } else if (line.startsWith(">")) { - // show list of potential downstream observations or choose the one after the > - } else if (line.startsWith("@")) { - // show list of observers or choose the one after the @ + } + parent.running.removeAll(removed); + } else { + java.util.List scopes = new ArrayList<>(parent.running); + for (int appId : appIds) { + SessionScope s = scopes.get(appId + 1); + s.close(); + parent.running.remove(s); + } } - - /* - * TODO - * < goes back one level of context observation (if any) - * << goes back to the userscope level - * >> goes to the innermost non-ambiguous scope and shows what's under it - * > obsId sets the ID'd context observation as the current context or resets it if no obsId is - * given - * (equivalent to <) - * @ obsId sets the observer or resets if no obsId is given - * ? n prints out the currently known observations (at level n, 1 if not given, full tree if n == - * 'all') - * ?? prints the same info as ? 
but much more in detail - */ - var currentContext = user(); - if (modeler.getCurrentContext() != null) { - currentContext = modeler.getCurrentContext(); - } else if (modeler.getCurrentSession() != null) { - currentContext = modeler.getCurrentSession(); + parent.list(); + } + } + } + + public static void main(String[] args) { + // AnsiConsole.systemInstall(); + + INSTANCE.options = CLIStartupOptions.create(args); + + try { + + // create the modeler + INSTANCE.modeler = new CommandLineModeler(); + // Configure messages for CLI use + INSTANCE.modeler.setOption(ModelerImpl.Option.UseAnsiEscapeSequences, true); + + Supplier workDir = + () -> + Paths.get( + System.getProperty("user.home") + + File.separator + + ".klab" + + File.separator + + "kcli"); + + // jline built-in commands + workDir.get().toFile().mkdirs(); + ConfigurationPath configPath = new ConfigurationPath(workDir.get(), workDir.get()); + Builtins builtins = new Builtins(workDir, configPath, null); + builtins.rename(Builtins.Command.TTOP, "top"); + builtins.alias("zle", "widget"); + builtins.alias("bindkey", "keymap"); + + // picocli + CliCommands commands = new CliCommands(); + PicocliCommandsFactory factory = new PicocliCommandsFactory(); + INSTANCE.commandLine = new CommandLine(commands, factory); + PicocliCommands picocliCommands = new PicocliCommands(INSTANCE.commandLine); + File historyFile = + new File(Configuration.INSTANCE.getDataPath() + File.separator + "kcli" + ".history"); + Parser parser = new DefaultParser(); + try (Terminal terminal = TerminalBuilder.builder().build()) { + + SystemRegistry systemRegistry = new SystemRegistryImpl(parser, terminal, workDir, null); + systemRegistry.setCommandRegistries(builtins, picocliCommands); + systemRegistry.register("help", picocliCommands); + KlabCompleter completer = new KlabCompleter(systemRegistry.completer()); + History history = new DefaultHistory(); + INSTANCE.reader = + LineReaderBuilder.builder() + .terminal(terminal) + .completer(completer) + .parser(parser) + .variable(LineReader.LIST_MAX, 50) // candidates + .history(history) + .build(); + + builtins.setLineReader(INSTANCE.reader); + commands.setReader(INSTANCE.reader); + factory.setTerminal(terminal); + history.attach(INSTANCE.reader); + + TailTipWidgets widgets = + new TailTipWidgets( + INSTANCE.reader, + systemRegistry::commandDescription, + 5, + TailTipWidgets.TipType.COMPLETER); + widgets.enable(); + KeyMap keyMap = INSTANCE.reader.getKeyMaps().get("main"); + keyMap.bind(new Reference("tailtip-toggle"), KeyMap.alt("s")); + + /** If we have a command, run it and exit FIXME use options field */ + if (args != null && args.length > 0) { + String line = Utils.Strings.join(args, ' '); + try { + systemRegistry.execute(line); + } catch (Throwable t) { + t.printStackTrace(); + System.exit(0xff); + } + System.exit(0); } - if (currentContext == null) { - INSTANCE.commandLine.getOut().println("No context"); - return; + if (historyFile.exists()) { + history.read(historyFile.toPath(), true); } - var runtime = currentContext.getService(RuntimeService.class); - if (runtime == null) { - INSTANCE.commandLine.getOut().println("No runtime service connected"); - return; - } + Run.loadAliases(); + + // boot the engine. This will schedule processes so it wont'delay startup. 
+ INSTANCE.modeler.boot(); + + // start the shell and process input until the user quits with Ctrl-D + String line; + while (true) { + try { + + systemRegistry.cleanUp(); + line = + INSTANCE.reader.readLine( + INSTANCE.prompt, INSTANCE.getContextPrompt(), (MaskingCallback) null, null); + completer.resetSemanticSearch(); + boolean aliased = false; + + /* + * Use <, >, .. to move through context observations, @ to set/reset the + * observer and + * ./? to inquire about the current context in detail. The right prompt + * summarizes + * the current context focus. + */ + if (line.trim().startsWith(".") + || line.trim().startsWith("<") + || line.trim().startsWith("@") + || line.trim().startsWith(">") + || line.trim().startsWith("?")) { + INSTANCE.setFocalScope(line.trim()); + continue; + } else if (line.trim().startsWith("-")) { + if (line.trim().equals("-") && history.size() > 0) { + line = history.get(history.last() - 1); + aliased = true; + } else if (org.integratedmodelling.klab.api.utils.Utils.Numbers.encodesInteger( + line.trim().substring(1))) { + int n = Integer.parseInt(line.trim().substring(1)); + if (history.size() > n) { + line = history.get(history.last() - n); + aliased = true; + } + } + } else if (Run.aliases.containsKey(line.trim())) { + line = Run.aliases.get(line.trim()); + } - boolean verbose = line.startsWith("??"); - var sessionInfo = runtime.getSessionInfo(currentContext); + if (aliased) { + // print the actual line in grey + italic + INSTANCE.commandLine.getOut().println(Ansi.AUTO.string("@|gray" + line + "|@")); + } - if (line.startsWith("?")) { + systemRegistry.execute(line); - int n = 0; - for (var session : sessionInfo) { - listSession(session, verbose, ++n); + if (!aliased) { + history.write(historyFile.toPath(), false); } + } catch (UserInterruptException e) { + // TODO send interrupt signal to running tasks + } catch (EndOfFileException e) { + System.exit(0); + } catch (Exception e) { + systemRegistry.trace(e); + } } - + } + } catch (Throwable t) { + t.printStackTrace(); + } finally { + // AnsiConsole.systemUninstall(); } - - private void printContextInfo() { - if (modeler != null && modeler.getCurrentSession() != null) { - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string("Session: @|green " + modeler.getCurrentSession().getName() + "|@")); - if (modeler.getCurrentContext() != null) { - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string(" Context: @|green " + modeler.getCurrentContext().getName() + "|@")); - if (modeler.getCurrentContext().getObserver() != null) { - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string(" Observer: @|green " + modeler.getCurrentContext().getObserver() + "|@")); - } - if (modeler.getCurrentContext().getContextObservation() != null) { - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string(" Within: @|green " + modeler.getCurrentContext().getContextObservation() + "|@")); - } - } + } + + /** + * Parse the string for context navigation operators and set the current context to whatever has + * been asked for. + * + * @param line + */ + private void setFocalScope(String line) { + + if (line.trim().equals(".")) { + + printContextInfo(); + + } else if (line.trim().equals("..") || line.trim().equals("<")) { + + Scope scope = + modeler == null + ? null + : (modeler.getCurrentSession() == null + ? modeler.user() + : (modeler.getCurrentContext() == null + ? 
modeler.getCurrentSession() + : modeler.getCurrentContext())); + // context setting + if (scope == null) { + INSTANCE.commandLine.getOut().println("No current scope"); + } else if (scope.getType() == Scope.Type.CONTEXT) { + + var parent = scope.getParentScope(); + if (parent != null && parent.getType() == Scope.Type.CONTEXT) { + modeler.setCurrentContext((ContextScope) parent); + } else if (parent != null && parent.getType() == Scope.Type.SESSION) { + modeler.setCurrentContext(null); } + printContextInfo(); + + } else if (scope.getType() == Scope.Type.SESSION) { + modeler.setCurrentContext(null); + modeler.setCurrentSession(null); + printContextInfo(); + } + + } else if (line.startsWith("<<")) { + this.modeler.setCurrentContext(null); + this.modeler.setCurrentSession(null); + } else if (line.startsWith("<")) { + // must have something after the < + } else if (line.startsWith(">")) { + // show list of potential downstream observations or choose the one after the > + } else if (line.startsWith("@")) { + // show list of observers or choose the one after the @ } - private void listSession(SessionInfo session, boolean verbose, int index) { - - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string("@|green Session " + index + "|@. " + session.getName() + " [" + session.getId() + "]")); - int n = 0; - for (var context : session.getContexts()) { - INSTANCE.commandLine.getOut().println(Ansi.AUTO.string(" @|yellow Context " + index + "." + (++n) + "|@. " + context.getName() + " [" + context.getId() + "]")); - if (verbose) { + /* + * TODO + * < goes back one level of context observation (if any) + * << goes back to the userscope level + * >> goes to the innermost non-ambiguous scope and shows what's under it + * > obsId sets the ID'd context observation as the current context or resets it if no obsId is + * given + * (equivalent to <) + * @ obsId sets the observer or resets if no obsId is given + * ? n prints out the currently known observations (at level n, 1 if not given, full tree if n == + * 'all') + * ?? prints the same info as ? 
but much more in detail + */ + var currentContext = user(); + if (modeler.getCurrentContext() != null) { + currentContext = modeler.getCurrentContext(); + } else if (modeler.getCurrentSession() != null) { + currentContext = modeler.getCurrentSession(); + } - } - } + if (currentContext == null) { + INSTANCE.commandLine.getOut().println("No context"); + return; + } + var runtime = currentContext.getService(RuntimeService.class); + if (runtime == null) { + INSTANCE.commandLine.getOut().println("No runtime service connected"); + return; } - // private void onEvent(Scope scope, Message message) { - // - // switch (message.getMessageClass()) { - // case UserInterface -> { - // } - // case UserContextChange -> { - // } - // case UserContextDefinition -> { - // } - // case ServiceLifecycle -> { - // switch (message.getMessageType()) { - // case ServiceAvailable -> { - // var capabilities = message.getPayload(KlabService.ServiceCapabilities - // .class); - // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + capabilities - // .getType() + - // " service available: " + capabilities.getServiceName() - // + "|@")); - // - // } - // case ServiceInitializing -> { - // var description = message.getPayload(KlabService.ServiceCapabilities.class); - // commandLine.getOut().println(Ansi.AUTO.string("@|blue " - // + "service initializing: " + description - // + "|@")); - // - // } - // case ServiceUnavailable -> { - // var capabilities = message.getPayload(KlabService.ServiceCapabilities - // .class); - // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + capabilities - // .getType() + - // " service unavailable: " + capabilities.getServiceName() - // + "|@")); - // } - // } - // } - // case EngineLifecycle -> { - // } - // case KimLifecycle -> { - // } - // case ResourceLifecycle -> { - // } - // case ProjectLifecycle -> { - // } - // case Authorization -> { - // } - // case TaskLifecycle -> { - // } - // case ObservationLifecycle -> { - // } - // case SessionLifecycle -> { - // } - // case UnitTests -> { - // } - // case Notification -> { - // switch (message.getMessageType()) { - // case Info -> { - // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + message.getPayload - // (Notification.class).getMessage() - // + "|@")); - // } - // case Error -> { - // commandLine.getOut().println(Ansi.AUTO.string("@|red " + message.getPayload - // (Notification.class).getMessage() - // + "|@")); - // } - // case Debug -> { - // commandLine.getOut().println(Ansi.AUTO.string("@|gray " + message.getPayload - // (Notification.class).getMessage() - // + "|@")); - // } - // case Warning -> { - // commandLine.getOut().println(Ansi.AUTO.string("@|yellow " + message - // .getPayload(Notification.class).getMessage() - // + "|@")); - // } - // default -> { - // } - // } - // } - // case Search -> { - // } - // case Query -> { - // } - // case Run -> { - // } - // case ViewActor -> { - // } - // case ActorCommunication -> { - // } - // default -> { - // } - // } - // - // if (message.getMessageClass() == Message.MessageClass.Notification) { - // - // } - // } - - public static void printResourceSet(ResourceSet resourceSet, PrintStream out, int indent) { - - if (resourceSet == null) { - out.println(Utils.Strings.spaces(indent) + "Null resource set"); - } else if (resourceSet.isEmpty()) { - out.println(Utils.Strings.spaces(indent) + "Empty resource set"); - } else { - // TODO - out.println("Namespaces:"); - for (ResourceSet.Resource namespace : resourceSet.getNamespaces()) { - out.println(" " + namespace); - 
} + boolean verbose = line.startsWith("??"); + var sessionInfo = runtime.getSessionInfo(currentContext); + + if (line.startsWith("?")) { + int n = 0; + for (var session : sessionInfo) { + listSession(session, verbose, ++n); + } + } + } + + private void printContextInfo() { + if (modeler != null && modeler.getCurrentSession() != null) { + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string("Session: @|green " + modeler.getCurrentSession().getName() + "|@")); + if (modeler.getCurrentContext() != null) { + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string( + " Context: @|green " + modeler.getCurrentContext().getName() + "|@")); + if (modeler.getCurrentContext().getObserver() != null) { + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string( + " Observer: @|green " + + modeler.getCurrentContext().getObserver() + + "|@")); + } + if (modeler.getCurrentContext().getContextObservation() != null) { + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string( + " Within: @|green " + + modeler.getCurrentContext().getContextObservation() + + "|@")); } + } } - + } + + private void listSession(SessionInfo session, boolean verbose, int index) { + + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string( + "@|green Session " + + index + + "|@. " + + session.getName() + + " [" + + session.getId() + + "]")); + int n = 0; + for (var context : session.getContexts()) { + INSTANCE + .commandLine + .getOut() + .println( + Ansi.AUTO.string( + " @|yellow Context " + + index + + "." + + (++n) + + "|@. " + + context.getName() + + " [" + + context.getId() + + "]")); + if (verbose) {} + } + } + + // private void onEvent(Scope scope, Message message) { + // + // switch (message.getMessageClass()) { + // case UserInterface -> { + // } + // case UserContextChange -> { + // } + // case UserContextDefinition -> { + // } + // case ServiceLifecycle -> { + // switch (message.getMessageType()) { + // case ServiceAvailable -> { + // var capabilities = message.getPayload(KlabService.ServiceCapabilities + // .class); + // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + capabilities + // .getType() + + // " service available: " + capabilities.getServiceName() + // + "|@")); + // + // } + // case ServiceInitializing -> { + // var description = + // message.getPayload(KlabService.ServiceCapabilities.class); + // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + // + "service initializing: " + description + // + "|@")); + // + // } + // case ServiceUnavailable -> { + // var capabilities = message.getPayload(KlabService.ServiceCapabilities + // .class); + // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + capabilities + // .getType() + + // " service unavailable: " + capabilities.getServiceName() + // + "|@")); + // } + // } + // } + // case EngineLifecycle -> { + // } + // case KimLifecycle -> { + // } + // case ResourceLifecycle -> { + // } + // case ProjectLifecycle -> { + // } + // case Authorization -> { + // } + // case TaskLifecycle -> { + // } + // case ObservationLifecycle -> { + // } + // case SessionLifecycle -> { + // } + // case UnitTests -> { + // } + // case Notification -> { + // switch (message.getMessageType()) { + // case Info -> { + // commandLine.getOut().println(Ansi.AUTO.string("@|blue " + + // message.getPayload + // (Notification.class).getMessage() + // + "|@")); + // } + // case Error -> { + // commandLine.getOut().println(Ansi.AUTO.string("@|red " + + // message.getPayload + // (Notification.class).getMessage() + 
// + "|@")); + // } + // case Debug -> { + // commandLine.getOut().println(Ansi.AUTO.string("@|gray " + + // message.getPayload + // (Notification.class).getMessage() + // + "|@")); + // } + // case Warning -> { + // commandLine.getOut().println(Ansi.AUTO.string("@|yellow " + message + // .getPayload(Notification.class).getMessage() + // + "|@")); + // } + // default -> { + // } + // } + // } + // case Search -> { + // } + // case Query -> { + // } + // case Run -> { + // } + // case ViewActor -> { + // } + // case ActorCommunication -> { + // } + // default -> { + // } + // } + // + // if (message.getMessageClass() == Message.MessageClass.Notification) { + // + // } + // } + + public static void printResourceSet(ResourceSet resourceSet, PrintStream out, int indent) { + + if (resourceSet == null) { + out.println(Utils.Strings.spaces(indent) + "Null resource set"); + } else if (resourceSet.isEmpty()) { + out.println(Utils.Strings.spaces(indent) + "Empty resource set"); + } else { + // TODO + out.println("Namespaces:"); + for (ResourceSet.Resource namespace : resourceSet.getNamespaces()) { + out.println(" " + namespace); + } + } + } } diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIAuthenticationView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIAuthenticationView.java index 88e1e1e5d..a70cd5b10 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIAuthenticationView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIAuthenticationView.java @@ -4,8 +4,6 @@ import org.integratedmodelling.klab.api.view.modeler.views.AuthenticationView; public class CLIAuthenticationView extends CLIView implements AuthenticationView { - @Override - public void notifyUser(UserIdentity identity) { - - } + @Override + public void notifyUser(UserIdentity identity) {} } diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIDistributionView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIDistributionView.java index d957490f0..e8c4bb374 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIDistributionView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIDistributionView.java @@ -2,5 +2,4 @@ import org.integratedmodelling.klab.api.view.modeler.panels.DistributionView; -public class CLIDistributionView extends CLIView implements DistributionView { -} +public class CLIDistributionView extends CLIView implements DistributionView {} diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIObservationView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIObservationView.java index 9ba2a6691..20f8c9b5d 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIObservationView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIObservationView.java @@ -1,11 +1,12 @@ package org.integratedmodelling.cli.views; +import java.io.PrintWriter; +import java.util.List; import org.integratedmodelling.cli.KlabCLI; import org.integratedmodelling.common.knowledge.KnowledgeRepository; import org.integratedmodelling.klab.api.data.Version; import org.integratedmodelling.klab.api.engine.Engine; import org.integratedmodelling.klab.api.knowledge.KlabAsset; -import org.integratedmodelling.klab.api.knowledge.Knowledge; import org.integratedmodelling.klab.api.scope.SessionScope; import org.integratedmodelling.klab.api.services.ResourcesService; import org.integratedmodelling.klab.api.services.RuntimeService; @@ -15,277 
+16,306 @@ import org.integratedmodelling.klab.api.view.modeler.views.controllers.ContextViewController; import picocli.CommandLine; -import java.io.PrintWriter; -import java.util.List; - -@CommandLine.Command(name = "observe", mixinStandardHelpOptions = true, version = Version.CURRENT, - description = { - "Commands to create, access and manipulate contexts.", - ""}, subcommands = {CLIObservationView.Session.class, - CLIObservationView.Context.class, - CLIObservationView.Clear.class}) +@CommandLine.Command( + name = "observe", + mixinStandardHelpOptions = true, + version = Version.CURRENT, + description = {"Commands to create, access and manipulate contexts.", ""}, + subcommands = { + CLIObservationView.Session.class, + CLIObservationView.Context.class, + CLIObservationView.Clear.class + }) public class CLIObservationView extends CLIView implements ContextView, Runnable { - private static ContextViewController controller; + private static ContextViewController controller; + + public CLIObservationView() { + controller = KlabCLI.INSTANCE.modeler().viewController(ContextViewController.class); + controller.registerView(this); + } + + @CommandLine.Spec CommandLine.Model.CommandSpec commandSpec; + + @CommandLine.Option( + names = {"-a", "--add"}, + defaultValue = "false", + description = {"Add to existing context as a parallel observation"}, + required = false) + boolean addToContext = false; + + @CommandLine.Option( + names = {"-w", "--within"}, + defaultValue = CommandLine.Parameters.NULL_VALUE, + description = { + "Choose an observation to become the context of the observation.", + "Use a dot to select the root subject if there is one." + }, + required = false) + private String within; + + @CommandLine.Option( + names = {"-g", "--geometry"}, + defaultValue = CommandLine.Parameters.NULL_VALUE, + description = { + "Override the geometry for the new observation (must be a " + "countable/substantial)." 
+ }, + required = false) + private String geometry; + + @CommandLine.Parameters List observables; + + @Override + public void run() { + + PrintWriter out = commandSpec.commandLine().getOut(); + PrintWriter err = commandSpec.commandLine().getErr(); + + if (observables == null || observables.isEmpty()) { + // int n = 1; + // if (observationsMade.isEmpty()) { + // out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow No previous + // observations|@ ")); + // } + // for (var urn : observationsMade) { + // out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow " + n + ".|@ " + + // urn)); + // } + return; + } - public CLIObservationView() { - controller = KlabCLI.INSTANCE.modeler().viewController(ContextViewController.class); - controller.registerView(this); + String urn = Utils.Strings.join(observables, " "); + + if (Utils.Numbers.encodesInteger(urn)) { + int n = Integer.parseInt(urn) - 1; + // if (n < 0 || observationsMade.size() >= n) { + // err.println("No previous observation at index " + n); + // return; + // } + // // FIXME use SessionInfo for everything, remove any state from the controller + // and + // engine except + // // the current session/ctx + // urn = observationsMade.get(n); + } else { + // observationsMade.add(urn); } - @CommandLine.Spec - CommandLine.Model.CommandSpec commandSpec; + var resources = KlabCLI.INSTANCE.user().getService(ResourcesService.class); + var resolvable = resources.resolve(urn, KlabCLI.INSTANCE.user()); + var results = + KnowledgeRepository.INSTANCE.ingest(resolvable, KlabCLI.INSTANCE.user(), KlabAsset.class); + + // TODO this is only for root observations + if (!results.isEmpty()) { + out.println( + CommandLine.Help.Ansi.AUTO.string( + "Observation of @|yellow " + + urn + + "|@ " + + "started in " + + results.getFirst().getUrn())); + KlabCLI.INSTANCE.modeler().observe(results.getFirst(), addToContext); + } else { + err.println( + CommandLine.Help.Ansi.AUTO.string( + "Can't resolve URN @|yellow " + urn + "|@ to " + "observable knowledge")); + } + } - @CommandLine.Option(names = {"-a", "--add"}, defaultValue = "false", - description = {"Add to existing context as a parallel observation"}, required = - false) - boolean addToContext = false; + @CommandLine.Command( + name = "close", + mixinStandardHelpOptions = true, + version = Version.CURRENT, + description = {"Close the active digital twin or session and delete all " + "observations"}) + public static class Clear implements Runnable { - @CommandLine.Option(names = {"-w", "--within"}, defaultValue = CommandLine.Parameters.NULL_VALUE, - description = { - "Choose an observation to become the context of the observation.", - "Use a dot to select the root subject if there is one."}, required = - false) - private String within; + @CommandLine.ParentCommand CLIObservationView parent; - @CommandLine.Option(names = {"-g", "--geometry"}, defaultValue = CommandLine.Parameters.NULL_VALUE, - description = { - "Override the geometry for the new observation (must be a " + - "countable/substantial)."}, required - = false) - private String geometry; + @CommandLine.Spec CommandLine.Model.CommandSpec commandSpec; - @CommandLine.Parameters - List observables; + @CommandLine.Option( + names = {"-f", "--force"}, + defaultValue = "false", + description = {"Close the current scope without asking for confirmation"}, + required = false) + boolean force = false; @Override public void run() { - PrintWriter out = commandSpec.commandLine().getOut(); - PrintWriter err = commandSpec.commandLine().getErr(); - - if (observables == null 
|| observables.isEmpty()) { - // int n = 1; - // if (observationsMade.isEmpty()) { - // out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow No previous - // observations|@ ")); - // } - // for (var urn : observationsMade) { - // out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow " + n + ".|@ " + urn)); - // } - return; - } - - String urn = Utils.Strings.join(observables, " "); - - if (Utils.Numbers.encodesInteger(urn)) { - int n = Integer.parseInt(urn) - 1; - // if (n < 0 || observationsMade.size() >= n) { - // err.println("No previous observation at index " + n); - // return; - // } - // // FIXME use SessionInfo for everything, remove any state from the controller and - // engine except - // // the current session/ctx - // urn = observationsMade.get(n); - } else { - // observationsMade.add(urn); - } - - var resources = KlabCLI.INSTANCE.user().getService(ResourcesService.class); - var resolvable = resources.resolve(urn, KlabCLI.INSTANCE.user()); - var results = KnowledgeRepository.INSTANCE.ingest(resolvable, KlabCLI.INSTANCE.user(), - KlabAsset.class); - - // TODO this is only for root observations - if (!results.isEmpty()) { - out.println(CommandLine.Help.Ansi.AUTO.string("Observation of @|yellow " + urn + "|@ " + - "started in " - + results.getFirst().getUrn())); - KlabCLI.INSTANCE.modeler().observe(results.getFirst(), addToContext); - } else { - err.println(CommandLine.Help.Ansi.AUTO.string("Can't resolve URN @|yellow " + urn + "|@ to " + - "observable knowledge")); + PrintWriter out = commandSpec.commandLine().getOut(); + PrintWriter err = commandSpec.commandLine().getErr(); + + boolean isSession = false; + Channel context = KlabCLI.INSTANCE.modeler().getCurrentContext(); + if (context == null) { + context = KlabCLI.INSTANCE.modeler().getCurrentSession(); + isSession = true; + } + + if (context == null) { + err.println("No current context or session."); + return; + } + + if (force + || KlabCLI.INSTANCE.confirm( + "Delete the current " + + (isSession ? "session" : "context") + + " and ALL " + + (isSession ? "contexts and observations in them" : "observations in it"))) { + + context.close(); + + out.println( + (isSession ? 
"Session" : "Context") + + " has been permanently closed and all " + + "data have been deleted"); + + KlabCLI.INSTANCE.modeler().setCurrentContext(null); + if (isSession) { + KlabCLI.INSTANCE.modeler().setCurrentSession(null); } + } } + } - @CommandLine.Command(name = "close", mixinStandardHelpOptions = true, version = Version.CURRENT, - description = {"Close the active digital twin or session and delete all " + - "observations"}) - public static class Clear implements Runnable { - - @CommandLine.ParentCommand - CLIObservationView parent; + /* ---- subcommands ---- */ - @CommandLine.Spec - CommandLine.Model.CommandSpec commandSpec; + @CommandLine.Command( + name = "session", + mixinStandardHelpOptions = true, + version = Version.CURRENT, + description = { + "List the active sessions and optionally choose one by number or " + "name", + "" + }, + subcommands = {Session.New.class}) + public static class Session implements Runnable { - @CommandLine.Option(names = {"-f", "--force"}, defaultValue = "false", - description = {"Close the current scope without asking for confirmation"}, - required = - false) - boolean force = false; + @CommandLine.ParentCommand CLIObservationView parent; - @Override - public void run() { + @CommandLine.Parameters(defaultValue = "__NULL__") + String sessionNumberOrId; - PrintWriter out = commandSpec.commandLine().getOut(); - PrintWriter err = commandSpec.commandLine().getErr(); + @CommandLine.Spec CommandLine.Model.CommandSpec commandSpec; - boolean isSession = false; - Channel context = KlabCLI.INSTANCE.modeler().getCurrentContext(); - if (context == null) { - context = KlabCLI.INSTANCE.modeler().getCurrentSession(); - isSession = true; - } - - if (context == null) { - err.println("No current context or session."); - return; - } + @CommandLine.Option(names = "-v", description = "print session information when listing") + boolean verbose; - if (force || KlabCLI.INSTANCE.confirm("Delete the current " - + (isSession ? "session" : "context") + " and ALL " - + (isSession ? "contexts and observations in them" : "observations in it"))) { - - context.close(); - - out.println((isSession ? 
"Session" : "Context") + " has been permanently closed and all " + - "data have been deleted"); - - KlabCLI.INSTANCE.modeler().setCurrentContext(null); - if (isSession) { - KlabCLI.INSTANCE.modeler().setCurrentSession(null); - } - } - } + private static String displaySession(SessionScope session) { + // TODO improve and react to verbose flag + return "<" + session.getName() + ", id=" + session.getId() + ">"; } - /* ---- subcommands ---- */ - - @CommandLine.Command(name = "session", mixinStandardHelpOptions = true, version = Version.CURRENT, - description = {"List the active sessions and optionally choose one by number or " + - "name", ""}, subcommands = {Session.New.class}) - public static class Session implements Runnable { - - @CommandLine.ParentCommand - CLIObservationView parent; + @Override + public void run() { - @CommandLine.Parameters(defaultValue = "__NULL__") - String sessionNumberOrId; + PrintWriter out = commandSpec.commandLine().getOut(); + PrintWriter err = commandSpec.commandLine().getErr(); - @CommandLine.Spec - CommandLine.Model.CommandSpec commandSpec; + if ("__NULL__".equals(sessionNumberOrId)) { - @CommandLine.Option(names = "-v", description = "print session information when listing") - boolean verbose; + var runtime = KlabCLI.INSTANCE.user().getService(RuntimeService.class); - private static String displaySession(SessionScope session) { - // TODO improve and react to verbose flag - return "<" + session.getName() + ", id=" + session.getId() + ">"; + for (var session : runtime.getSessionInfo(KlabCLI.INSTANCE.user())) { + // TODO this is the proper way } - @Override - public void run() { - - PrintWriter out = commandSpec.commandLine().getOut(); - PrintWriter err = commandSpec.commandLine().getErr(); - - if ("__NULL__".equals(sessionNumberOrId)) { - - var runtime = KlabCLI.INSTANCE.user().getService(RuntimeService.class); - - for (var session : runtime.getSessionInfo(KlabCLI.INSTANCE.user())) { - // TODO this is the proper way - } - - // FIXME below is the wrong way. Engine should only have a current session, the selected - // runtime knows the rest. - int n = 1; - if (KlabCLI.INSTANCE.modeler().getOpenSessions().isEmpty()) { - out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow No sessions|@ ")); - } - for (var session : KlabCLI.INSTANCE.modeler().getOpenSessions()) { - out.println(CommandLine.Help.Ansi.AUTO.string("@|green " + n + ". " + displaySession(session) + "|@")); - } - return; - - } else { - - SessionScope selected = null; - - if (Utils.Numbers.encodesInteger(sessionNumberOrId)) { - int n = Integer.parseInt(sessionNumberOrId) - 1; - if (n > 0 && KlabCLI.INSTANCE.modeler().getOpenSessions().size() < n) { - selected = KlabCLI.INSTANCE.modeler().getOpenSessions().get(n); - } - } else for (var session : KlabCLI.INSTANCE.modeler().getOpenSessions()) { - if (sessionNumberOrId.equals(session.getName()) || sessionNumberOrId.equals(session.getId())) { - selected = session; - break; - } - } - - // select the session with the passed number or name/ID - if (selected != null) { - KlabCLI.INSTANCE.modeler().setCurrentSession(selected); - out.println(CommandLine.Help.Ansi.AUTO.string("@|green Session " + displaySession(selected) + "selected|@ ")); - } + // FIXME below is the wrong way. Engine should only have a current session, the selected + // runtime knows the rest. 
+ int n = 1; + if (KlabCLI.INSTANCE.modeler().getOpenSessions().isEmpty()) { + out.println(CommandLine.Help.Ansi.AUTO.string("@|yellow No sessions|@ ")); + } + for (var session : KlabCLI.INSTANCE.modeler().getOpenSessions()) { + out.println( + CommandLine.Help.Ansi.AUTO.string( + "@|green " + (n++) + ". " + displaySession(session) + "|@")); + } + return; + + } else { + + SessionScope selected = null; + + if (Utils.Numbers.encodesInteger(sessionNumberOrId)) { + int n = Integer.parseInt(sessionNumberOrId) - 1; + if (n >= 0 && n < KlabCLI.INSTANCE.modeler().getOpenSessions().size()) { + selected = KlabCLI.INSTANCE.modeler().getOpenSessions().get(n); + } + } else + for (var session : KlabCLI.INSTANCE.modeler().getOpenSessions()) { + if (sessionNumberOrId.equals(session.getName()) + || sessionNumberOrId.equals(session.getId())) { + selected = session; + break; } + } + + // select the session with the passed number or name/ID + if (selected != null) { + KlabCLI.INSTANCE.modeler().setCurrentSession(selected); + out.println( + CommandLine.Help.Ansi.AUTO.string( + "@|green Session " + displaySession(selected) + " selected|@ ")); } + } + } - @CommandLine.Command(name = "new", mixinStandardHelpOptions = true, version = Version.CURRENT, - description = {"Create a new session and make it current.", ""}, subcommands = - {}) - public static class New implements Runnable { - - @CommandLine.ParentCommand - Session parent; + @CommandLine.Command( + name = "new", + mixinStandardHelpOptions = true, + version = Version.CURRENT, + description = {"Create a new session and make it current.", ""}, + subcommands = {}) + public static class New implements Runnable { - @CommandLine.Parameters(defaultValue = "__NULL__") - String sessionName; + @CommandLine.ParentCommand Session parent; - @CommandLine.Spec - CommandLine.Model.CommandSpec commandSpec; + @CommandLine.Parameters(defaultValue = "__NULL__") + String sessionName; - @Override - public void run() { + @CommandLine.Spec CommandLine.Model.CommandSpec commandSpec; - PrintWriter out = commandSpec.commandLine().getOut(); - PrintWriter err = commandSpec.commandLine().getErr(); + @Override + public void run() { - String sessionName = "__NULL__".equals(this.sessionName) ? - ("Session " + (KlabCLI.INSTANCE.modeler().getOpenSessions().size() + 1)) : this.sessionName; - var ret = KlabCLI.INSTANCE.modeler().openNewSession(sessionName); - out.println(CommandLine.Help.Ansi.AUTO.string("@|green New session " + displaySession(ret)) + - " created|@"); + PrintWriter out = commandSpec.commandLine().getOut(); + PrintWriter err = commandSpec.commandLine().getErr(); - } - } + String sessionName = + "__NULL__".equals(this.sessionName) + ? 
("Session " + (KlabCLI.INSTANCE.modeler().getOpenSessions().size() + 1)) + : this.sessionName; + var ret = KlabCLI.INSTANCE.modeler().openNewSession(sessionName); + out.println( + CommandLine.Help.Ansi.AUTO.string("@|green New session " + displaySession(ret) + " created|@")); + } } } + @CommandLine.Command( + name = "context", + mixinStandardHelpOptions = true, + version = Version.CURRENT, + description = {"Connect to an existing context.", ""}, + subcommands = {}) + public static class Context implements Runnable { - @CommandLine.Command(name = "context", mixinStandardHelpOptions = true, version = Version.CURRENT, - description = {"Connect to an existing context.", ""}, subcommands = {}) - public static class Context implements Runnable { - - @CommandLine.ParentCommand - CLIObservationView parent; - - @Override - public void run() { - var runtime = KlabCLI.INSTANCE.user().getService(RuntimeService.class); - for (var session : runtime.getSessionInfo(KlabCLI.INSTANCE.user())) { - - } - } + @CommandLine.ParentCommand CLIObservationView parent; + @Override + public void run() { + var runtime = KlabCLI.INSTANCE.user().getService(RuntimeService.class); + for (var session : runtime.getSessionInfo(KlabCLI.INSTANCE.user())) {} } + } - /* ---- view methods ---- */ + /* ---- view methods ---- */ - @Override - public void engineStatusChanged(Engine.Status status) { - - } + @Override + public void engineStatusChanged(Engine.Status status) {} } diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIReasonerView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIReasonerView.java index 593cfc68f..8428a6fd2 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIReasonerView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIReasonerView.java @@ -4,6 +4,8 @@ import org.integratedmodelling.common.utils.Utils; import org.integratedmodelling.klab.api.configuration.Configuration; import org.integratedmodelling.klab.api.data.Version; +import org.integratedmodelling.klab.api.digitaltwin.DigitalTwin; +import org.integratedmodelling.klab.api.geometry.Geometry; import org.integratedmodelling.klab.api.knowledge.Concept; import org.integratedmodelling.klab.api.knowledge.DescriptionType; import org.integratedmodelling.klab.api.knowledge.SemanticType; @@ -152,6 +154,8 @@ public void run() { var reasoner = ctx.getService(org.integratedmodelling.klab.api.services.Reasoner.class); var observable = reasoner.resolveObservable(urn); + var geom = geometry == null ? 
null : Geometry.create(geometry); + if (observable == null) { err.println( CommandLine.Help.Ansi.AUTO.string( @@ -169,6 +173,10 @@ public void run() { observable = observable.builder(ctx).as(DescriptionType.ACKNOWLEDGEMENT).build(); } + var observation = + DigitalTwin.createObservation( + KlabCLI.INSTANCE.modeler().getCurrentScope(), observable, geometry); + out.println( CommandLine.Help.Ansi.AUTO.string( "Observation strategies for @|bold " @@ -176,11 +184,12 @@ public void run() { + "|@ of @|green " + observable.getUrn() + "|@:")); - // for (var strategy : reasoner.inferStrategies(observable, ctx)) { - // out.println(Utils.Strings.indent(strategy.toString(), - // Utils.Strings.fillUpLeftAligned(strategy.getCost() + ".", - // " ", 4))); - // } + for (var strategy : reasoner.computeObservationStrategies(observation, ctx)) { + out.println( + Utils.Strings.indent( + strategy.toString(), + Utils.Strings.fillUpLeftAligned(strategy.getRank() + ".", " ", 4))); + } } } diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIResourcesView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIResourcesView.java index d7d5f7d68..00df2ba9f 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIResourcesView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIResourcesView.java @@ -1,10 +1,10 @@ package org.integratedmodelling.cli.views; +import java.io.PrintStream; +import java.io.PrintWriter; import org.integratedmodelling.cli.KlabCLI; -import org.integratedmodelling.common.utils.Utils; import org.integratedmodelling.klab.api.data.Version; import org.integratedmodelling.klab.api.engine.Engine; -import org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException; import org.integratedmodelling.klab.api.services.ResourcesService; import org.integratedmodelling.klab.api.view.modeler.navigation.NavigableAsset; import org.integratedmodelling.klab.api.view.modeler.navigation.NavigableContainer; @@ -12,10 +12,6 @@ import org.integratedmodelling.klab.api.view.modeler.views.controllers.ResourcesNavigatorController; import picocli.CommandLine; -import java.io.File; -import java.io.PrintStream; -import java.io.PrintWriter; - @CommandLine.Command( name = "resources", mixinStandardHelpOptions = true, diff --git a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIServicesView.java b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIServicesView.java index 4d740c5c2..4832a5cd4 100644 --- a/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIServicesView.java +++ b/klab.cli/src/main/java/org/integratedmodelling/cli/views/CLIServicesView.java @@ -1,327 +1,377 @@ package org.integratedmodelling.cli.views; -import org.checkerframework.checker.units.qual.K; +import java.io.PrintWriter; +import java.util.concurrent.atomic.AtomicReference; import org.integratedmodelling.cli.KlabCLI; import org.integratedmodelling.common.services.client.ServiceClient; import org.integratedmodelling.common.utils.Utils; import org.integratedmodelling.klab.api.ServicesAPI; import org.integratedmodelling.klab.api.engine.Engine; -import org.integratedmodelling.klab.api.engine.distribution.RunningInstance; import org.integratedmodelling.klab.api.services.KlabService; -import org.integratedmodelling.klab.api.services.Reasoner; import org.integratedmodelling.klab.api.view.modeler.views.ServicesView; import org.integratedmodelling.klab.api.view.modeler.views.controllers.ServicesViewController; import picocli.CommandLine; -import 
java.io.PrintWriter; -import java.util.concurrent.atomic.AtomicReference; - -@CommandLine.Command(name = "services", - mixinStandardHelpOptions = true, - description = {"List, select and control services.", "Services can be started locally " + - "or connected from the k" - + ".LAB network.", "Service discovery is supported according to credentials.", - ""}, - subcommands = {org.integratedmodelling.cli.views.CLIServicesView.Connect.class, - CLIServicesView.Resources.class, CLIServicesView.Runtime.class}) +@CommandLine.Command( + name = "services", + mixinStandardHelpOptions = true, + description = { + "List, select and control services.", + "Services can be started locally " + "or connected from the k" + ".LAB network.", + "Service discovery is supported according to credentials.", + "" + }, + subcommands = { + org.integratedmodelling.cli.views.CLIServicesView.Connect.class, + CLIServicesView.Resources.class, + CLIServicesView.Runtime.class + }) public class CLIServicesView extends CLIView implements Runnable, ServicesView { - private static ServicesViewController controller; - private static AtomicReference status = new AtomicReference<>(); - - public CLIServicesView() { - controller = KlabCLI.INSTANCE.modeler().viewController(ServicesViewController.class); - controller.registerView(this); - } - - @CommandLine.Spec - CommandLine.Model.CommandSpec commandSpec; - - @CommandLine.Option(names = {"-v", "--verbose"}, - defaultValue = "false", - description = {"Display status and capabilities from services"}, - required = false) - boolean verbose = false; - - @CommandLine.Option(names = {"-rs", "--reasoners"}, - defaultValue = "false", - description = {"List all reasoner services."}, - required = false) - boolean reasoners = false; - - @CommandLine.Option(names = {"-rv", "--resolvers"}, - defaultValue = "false", - description = {"List all resolver services."}, - required = false) - boolean resolvers = false; - - @CommandLine.Option(names = {"-rn", "--runtimes"}, - defaultValue = "false", - description = {"List all runtime services."}, - required = false) - boolean runtimes = false; - - @CommandLine.Option(names = {"-rr", "--resources"}, - defaultValue = "false", - description = {"List all resource services."}, - required = false) - boolean resources = false; - - @CommandLine.Option(names = {"-c", "--community"}, - defaultValue = "false", - description = {"List all community services."}, - required = false) - boolean community = false; - - @Override - public void run() { - - PrintWriter out = commandSpec.commandLine().getOut(); - - /* - * TODO Print generic info about the service scope and the discovery strategy - * installed. - */ - - for (var serviceType : new KlabService.Type[]{KlabService.Type.REASONER, KlabService.Type.RESOURCES - , KlabService.Type.RESOLVER, KlabService.Type.RUNTIME, KlabService.Type.COMMUNITY, - KlabService.Type.ENGINE}) { - - if (serviceType == KlabService.Type.ENGINE) { - // TODO describe the engine - } else { - - boolean first = true; - for (var service : KlabCLI.INSTANCE.engine().serviceScope().getServices( - serviceType.classify())) { - - if (reasoners && serviceType != KlabService.Type.REASONER || resolvers && serviceType != KlabService.Type.RESOLVER || resources && serviceType != KlabService.Type.RESOURCES || runtimes && serviceType != KlabService.Type.RUNTIME || community && serviceType != KlabService.Type.COMMUNITY) { - continue; - } - - /* - * TODO tag each service with a keyword or parameter so that it can be easily - * referenced using connect. 
Keep the services dictionary in the superclass. - */ - - if (first) { - // out.println(serviceType); - // TODO number for selection; highlight the name of the "current" service in - // each category - out.println(" " + Utils.Paths.getLast( - service.getClass().getName(), - '.') + ": " + service.getServiceName() + " " + " [" + (service.status().isAvailable() ? "available" : "not available") + (service instanceof ServiceClient client && client.isLocal() ? "," + "local" : "") + "] " + service.getUrl() + ServicesAPI.CAPABILITIES); - if (verbose) { - out.println(Utils.Strings.indent(Utils.Json.printAsJson( - service.capabilities(KlabCLI.INSTANCE.engine().getUsers().get(0))), 6)); - } - } - first = false; - } + private static ServicesViewController controller; + private static AtomicReference status = new AtomicReference<>(); + + public CLIServicesView() { + controller = KlabCLI.INSTANCE.modeler().viewController(ServicesViewController.class); + controller.registerView(this); + } + + @CommandLine.Spec CommandLine.Model.CommandSpec commandSpec; + + @CommandLine.Option( + names = {"-v", "--verbose"}, + defaultValue = "false", + description = {"Display status and capabilities from services"}, + required = false) + boolean verbose = false; + + @CommandLine.Option( + names = {"-rs", "--reasoners"}, + defaultValue = "false", + description = {"List all reasoner services."}, + required = false) + boolean reasoners = false; + + @CommandLine.Option( + names = {"-rv", "--resolvers"}, + defaultValue = "false", + description = {"List all resolver services."}, + required = false) + boolean resolvers = false; + + @CommandLine.Option( + names = {"-rn", "--runtimes"}, + defaultValue = "false", + description = {"List all runtime services."}, + required = false) + boolean runtimes = false; + + @CommandLine.Option( + names = {"-rr", "--resources"}, + defaultValue = "false", + description = {"List all resource services."}, + required = false) + boolean resources = false; + + @CommandLine.Option( + names = {"-c", "--community"}, + defaultValue = "false", + description = {"List all community services."}, + required = false) + boolean community = false; + + @Override + public void run() { + + PrintWriter out = commandSpec.commandLine().getOut(); + + /* + * TODO Print generic info about the service scope and the discovery strategy + * installed. + */ + + for (var serviceType : + new KlabService.Type[] { + KlabService.Type.REASONER, + KlabService.Type.RESOURCES, + KlabService.Type.RESOLVER, + KlabService.Type.RUNTIME, + KlabService.Type.COMMUNITY, + KlabService.Type.ENGINE + }) { + + if (serviceType == KlabService.Type.ENGINE) { + // TODO describe the engine + } else { + + boolean first = true; + for (var service : + KlabCLI.INSTANCE.engine().serviceScope().getServices(serviceType.classify())) { + + if (reasoners && serviceType != KlabService.Type.REASONER + || resolvers && serviceType != KlabService.Type.RESOLVER + || resources && serviceType != KlabService.Type.RESOURCES + || runtimes && serviceType != KlabService.Type.RUNTIME + || community && serviceType != KlabService.Type.COMMUNITY) { + continue; + } + + /* + * TODO tag each service with a keyword or parameter so that it can be easily + * referenced using connect. Keep the services dictionary in the superclass. 
+ */ + + if (first) { + // out.println(serviceType); + // TODO number for selection; highlight the name of the "current" service in + // each category + out.println( + " " + + Utils.Paths.getLast(service.getClass().getName(), '.') + + ": " + + service.getServiceName() + + " " + + " [" + + (service.status().isAvailable() ? "available" : "not available") + + (service instanceof ServiceClient client && client.isLocal() + ? "," + "local" + : "") + + "] " + + service.getUrl() + + ServicesAPI.CAPABILITIES); + if (verbose) { + out.println( + Utils.Strings.indent( + Utils.Json.printAsJson( + service.capabilities(KlabCLI.INSTANCE.engine().getUsers().get(0))), + 6)); } + } + first = false; } + } } - - public static class ServiceHandler { - - /** - * TODO add optional arguments to fill in the entire request on the CLI by passing the schema id and - * parameters. The help should list all parameters with a description (currently missing). - * - * @param serviceType - */ - protected static void importFromSchema(KlabService.Type serviceType, boolean help, - String suggestedUrn, java.util.List arguments) { - - if (help) { - - return; - } - - var service = KlabCLI.INSTANCE.user().getService(serviceType.classify()); - if (service != null) { - KlabCLI.INSTANCE.importWithSchema(service, suggestedUrn, arguments); - } - } - - /** - * TODO add optional arguments to fill in the entire request on the CLI by passing the schema id and - * parameters. The help should list all parameters with a description (currently missing). - * - * @param serviceType - */ - protected static void exportFromSchema(KlabService.Type serviceType, boolean help, - java.util.List arguments) { - - if (help) { - return; - } - - var service = KlabCLI.INSTANCE.user().getService(serviceType.classify()); - if (service != null) { - KlabCLI.INSTANCE.exportWithSchema(service, arguments); - } - } - - + } + + public static class ServiceHandler { + + /** + * TODO add optional arguments to fill in the entire request on the CLI by passing the schema id + * and parameters. The help should list all parameters with a description (currently missing). 
+ * + * @param serviceType + */ + protected static void importFromSchema( + KlabService.Type serviceType, + boolean help, + String suggestedUrn, + java.util.List arguments) { + + if (help) { + + return; + } + + var service = KlabCLI.INSTANCE.user().getService(serviceType.classify()); + if (service != null) { + KlabCLI.INSTANCE.importWithSchema(service, suggestedUrn, arguments); + } } - // TODO help should be custom and show the available schemata - // TODO enable inline definitions - @CommandLine.Command(name = "resources", - subcommands = {Resources.Import.class, Resources.Export.class}, - mixinStandardHelpOptions = true, - description = {"Connect to an " + "available " + "service", "Makes the service " + - "available" + " for " + "requests."}) - public static class Resources extends ServiceHandler { - - @CommandLine.Option(names = {"-h", "--help"}, - defaultValue = "false", - description = {"Display available import schemata"}, - required = false) - boolean help = false; - - @CommandLine.Command(name = "import", - mixinStandardHelpOptions = false, - description = {"Connect to an " + "available " + "service", "Makes the service" + - " available" + " for " + "requests."}) - public static class Import implements Runnable { - - @CommandLine.Option(names = {"-h", "--help"}, - defaultValue = "false", - description = {"Display available import schemata"}, - required = false) - boolean help = false; - - @CommandLine.Option(names = {"-u", "--urn"}, - defaultValue = "X:X:X:X", - description = {"Pass suggested URN for import (result may differ)"}, - required = false) - String urn; - - @CommandLine.Parameters - java.util.List arguments; - - @Override - public void run() { - importFromSchema(KlabService.Type.RESOURCES, help, urn, arguments); - } - } - - @CommandLine.Command(name = "export", - mixinStandardHelpOptions = false, - description = {"Connect to an " + "available " + "service", "Makes the service" + - " available" + " for " + "requests."}) - public static class Export implements Runnable { - - @CommandLine.Option(names = {"-h", "--help"}, - defaultValue = "false", - description = {"Display available import schemata"}, - required = false) - boolean help = false; - - @CommandLine.Parameters - java.util.List arguments; - - @Override - public void run() { - exportFromSchema(KlabService.Type.RESOURCES, help, arguments); - } - } + /** + * TODO add optional arguments to fill in the entire request on the CLI by passing the schema id + * and parameters. The help should list all parameters with a description (currently missing). 
+ * + * @param serviceType + */ + protected static void exportFromSchema( + KlabService.Type serviceType, boolean help, java.util.List arguments) { + + if (help) { + return; + } + + var service = KlabCLI.INSTANCE.user().getService(serviceType.classify()); + if (service != null) { + KlabCLI.INSTANCE.exportWithSchema(service, arguments); + } } - - // TODO help should be custom and show the available schemata - // TODO enable inline definitions - @CommandLine.Command(name = "runtime", - subcommands = {Runtime.Import.class, Runtime.Export.class}, - mixinStandardHelpOptions = true, - description = {"Connect to an " + "available " + "service", "Makes the service " + - "available" + " for " + "requests."}) - static class Runtime extends ServiceHandler { - - @CommandLine.Command(name = "import", - mixinStandardHelpOptions = false, - description = {"Connect to an " + "available " + "service", "Makes the service" + - " available" + " for " + "requests."}) - public static class Import implements Runnable { - - @CommandLine.Option(names = {"-h", "--help"}, - defaultValue = "false", - description = {"Display available import schemata"}, - required = false) - boolean help = false; - - @CommandLine.Option(names = {"-u", "--urn"}, - defaultValue = "X:X:X:X", - description = {"Pass suggested URN for import (result may differ)"}, - required = false) - String urn; - - @CommandLine.Parameters - java.util.List arguments; - - @Override - public void run() { - importFromSchema(KlabService.Type.RUNTIME, help, urn, arguments); - } - } - - @CommandLine.Command(name = "export", - mixinStandardHelpOptions = false, - description = {"Connect to an " + "available " + "service", "Makes the service" + - " available" + " for " + "requests."}) - public static class Export implements Runnable { - - @CommandLine.Option(names = {"-h", "--help"}, - defaultValue = "false", - description = {"Display available import schemata"}, - required = false) - boolean help = false; - - @CommandLine.Parameters - java.util.List arguments; - - @Override - public void run() { - exportFromSchema(KlabService.Type.RUNTIME, help, arguments); - } - } - + } + + // TODO help should be custom and show the available schemata + // TODO enable inline definitions + @CommandLine.Command( + name = "resources", + subcommands = {Resources.Import.class, Resources.Export.class}, + mixinStandardHelpOptions = true, + description = { + "Connect to an " + "available " + "service", + "Makes the service " + "available" + " for " + "requests." + }) + public static class Resources extends ServiceHandler { + + @CommandLine.Option( + names = {"-h", "--help"}, + defaultValue = "false", + description = {"Display available import schemata"}, + required = false) + boolean help = false; + + @CommandLine.Command( + name = "import", + mixinStandardHelpOptions = false, + description = { + "Connect to an " + "available " + "service", + "Makes the service" + " available" + " for " + "requests." 
+ }) + public static class Import implements Runnable { + + @CommandLine.Option( + names = {"-h", "--help"}, + defaultValue = "false", + description = {"Display available import schemata"}, + required = false) + boolean help = false; + + @CommandLine.Option( + names = {"-u", "--urn"}, + defaultValue = "X:X:X:X", + description = {"Pass suggested URN for import (result may differ)"}, + required = false) + String urn; + + @CommandLine.Parameters java.util.List arguments; + + @Override + public void run() { + importFromSchema(KlabService.Type.RESOURCES, help, urn, arguments); + } } + @CommandLine.Command( + name = "export", + mixinStandardHelpOptions = false, + description = { + "Connect to an " + "available " + "service", + "Makes the service" + " available" + " for " + "requests." + }) + public static class Export implements Runnable { + + @CommandLine.Option( + names = {"-h", "--help"}, + defaultValue = "false", + description = {"Display available import schemata"}, + required = false) + boolean help = false; + + @CommandLine.Parameters java.util.List arguments; + + @Override + public void run() { + exportFromSchema(KlabService.Type.RESOURCES, help, arguments); + } + } + } + + // TODO help should be custom and show the available schemata + // TODO enable inline definitions + @CommandLine.Command( + name = "runtime", + subcommands = {Runtime.Import.class, Runtime.Export.class}, + mixinStandardHelpOptions = true, + description = { + "Connect to an " + "available " + "service", + "Makes the service " + "available" + " for " + "requests." + }) + static class Runtime extends ServiceHandler { + + @CommandLine.Command( + name = "import", + mixinStandardHelpOptions = false, + description = { + "Connect to an " + "available " + "service", + "Makes the service" + " available" + " for " + "requests." + }) + public static class Import implements Runnable { + + @CommandLine.Option( + names = {"-h", "--help"}, + defaultValue = "false", + description = {"Display available import schemata"}, + required = false) + boolean help = false; + + @CommandLine.Option( + names = {"-u", "--urn"}, + defaultValue = "X:X:X:X", + description = {"Pass suggested URN for import (result may differ)"}, + required = false) + String urn; + + @CommandLine.Parameters java.util.List arguments; + + @Override + public void run() { + importFromSchema(KlabService.Type.RUNTIME, help, urn, arguments); + } + } - @CommandLine.Command(name = "connect", - mixinStandardHelpOptions = true, - description = {"Connect to an " + "available " + "service", "Makes the service " + - "available" + " for " + "requests."}) - static class Connect implements Runnable { - - @CommandLine.Option(names = {"-d", "--default"}, - defaultValue = "false", - description = {"Make the connected service also the default to answer requests."}, - required = false) - boolean makeDefault = false; - - @Override - public void run() { - // TODO Auto-generated method stub - - } - + @CommandLine.Command( + name = "export", + mixinStandardHelpOptions = false, + description = { + "Connect to an " + "available " + "service", + "Makes the service" + " available" + " for " + "requests." 
+ }) + public static class Export implements Runnable { + + @CommandLine.Option( + names = {"-h", "--help"}, + defaultValue = "false", + description = {"Display available import schemata"}, + required = false) + boolean help = false; + + @CommandLine.Parameters java.util.List arguments; + + @Override + public void run() { + exportFromSchema(KlabService.Type.RUNTIME, help, arguments); + } } + } + + @CommandLine.Command( + name = "connect", + mixinStandardHelpOptions = true, + description = { + "Connect to an " + "available " + "service", + "Makes the service " + "available" + " for " + "requests." + }) + static class Connect implements Runnable { + + @CommandLine.Option( + names = {"-d", "--default"}, + defaultValue = "false", + description = {"Make the connected service also the default to answer requests."}, + required = false) + boolean makeDefault = false; @Override - public void servicesConfigurationChanged(KlabService.ServiceCapabilities service) { + public void run() { + // TODO Auto-generated method stub } + } - @Override - public void notifyServiceStatus(KlabService.ServiceStatus status) { + @Override + public void servicesConfigurationChanged(KlabService.ServiceCapabilities service) {} - } + @Override + public void notifyServiceStatus(KlabService.ServiceStatus status) {} - @Override - public void engineStatusChanged(Engine.Status status) { - this.status.set(status); - } + @Override + public void engineStatusChanged(Engine.Status status) { + this.status.set(status); + } } diff --git a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/CRUDOperation.java b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/CRUDOperation.java index b2fb39a06..65f077e94 100644 --- a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/CRUDOperation.java +++ b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/CRUDOperation.java @@ -1,7 +1,9 @@ package org.integratedmodelling.klab.api.authentication; public enum CRUDOperation { - - CREATE, DELETE, UPDATE, UPDATE_METADATA, READ - + CREATE, + DELETE, + UPDATE, + UPDATE_METADATA, + READ } diff --git a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/ResourcePrivileges.java b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/ResourcePrivileges.java index 40de36c32..913aef91d 100644 --- a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/ResourcePrivileges.java +++ b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/authentication/ResourcePrivileges.java @@ -11,212 +11,208 @@ import java.util.Set; /** - * Permissions in k.LAB are either "*" for public and/or a list of comma-separated groups (uppercase) and/or - * usernames (lowercase). An empty permission string means "owner only" (and possibly admin, left to - * implementations). Prefixing either with a ! denies the permission for the user or group (supposedly to - * narrow a previous more general one: e.g. *,!BADGUYS). - *

- * This class parses a permission string and has methods to establish authorization given a username and a set - * of groups. + * Permissions in k.LAB are either "*" for public and/or a list of comma-separated groups + * (uppercase) and/or usernames (lowercase). An empty permission string means "owner only" (and + * possibly admin, left to implementations). Prefixing either with a ! denies the permission for the + * user or group (typically to narrow a broader permission granted earlier: e.g. *,!BADGUYS). + * + *
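Illustrative aside, not part of the patch: a minimal sketch of how a permission string of the kind described above might be built and queried, using only the API shown in this class. The username and the printed values are assumptions for illustration.

    import java.util.List;
    import org.integratedmodelling.klab.api.authentication.ResourcePrivileges;

    class PermissionStringSketch {
      public static void main(String[] args) {
        // "*,!BADGUYS": public access, with the (hypothetical) group BADGUYS explicitly excluded.
        ResourcePrivileges privileges = ResourcePrivileges.create("*,!BADGUYS");
        // A user with no groups is authorized because the resource is public.
        System.out.println(privileges.checkAuthorization("some.user", List.of())); // expected: true
        // toString() re-encodes the parsed permissions, e.g. back to "*,!BADGUYS".
        System.out.println(privileges);
      }
    }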

This class parses a permission string and has methods to establish authorization given a + * username and a set of groups. */ public class ResourcePrivileges implements Serializable { - private boolean isPublic; - private Set allowedGroups = new HashSet<>(); - private Set excludedGroups = new HashSet<>(); - private Set allowedUsers = new HashSet<>(); - private Set excludedUsers = new HashSet<>(); - private Set allowedServices = new HashSet<>(); - - - /** - * Use this constant instead of building an object to define publicly accessible resources. - */ - static public ResourcePrivileges PUBLIC; - - static { - PUBLIC = new ResourcePrivileges(); - PUBLIC.setPublic(true); - } - - public ResourcePrivileges() { - } - - private ResourcePrivileges(String s) { - if (s != null && !s.isEmpty()) { - String[] ss = s.split(","); - for (String token : ss) { - token = token.trim(); - if ("*".equals(token)) { - this.isPublic = true; - } else { - if (!token.equals(token.toUpperCase())) { - // lowercase - if (token.startsWith("!")) { - this.excludedUsers.add(token.substring(1)); - } else { - this.allowedUsers.add(token); - } - } else { - if (token.startsWith("!")) { - this.excludedGroups.add(token.substring(1)); - } else { - this.allowedGroups.add(token); - } - } - } + private boolean isPublic; + private Set allowedGroups = new HashSet<>(); + private Set excludedGroups = new HashSet<>(); + private Set allowedUsers = new HashSet<>(); + private Set excludedUsers = new HashSet<>(); + private Set allowedServices = new HashSet<>(); + + /** Use this constant instead of building an object to define publicly accessible resources. */ + public static ResourcePrivileges PUBLIC; + + static { + PUBLIC = new ResourcePrivileges(); + PUBLIC.setPublic(true); + } + + public ResourcePrivileges() {} + + private ResourcePrivileges(String s) { + if (s != null && !s.isEmpty()) { + String[] ss = s.split(","); + for (String token : ss) { + token = token.trim(); + if ("*".equals(token)) { + this.isPublic = true; + } else { + if (!token.equals(token.toUpperCase())) { + // lowercase + if (token.startsWith("!")) { + this.excludedUsers.add(token.substring(1)); + } else { + this.allowedUsers.add(token); } - } - } - - /** - * Create an empty permission object (to add to if wished). Its toString() method will produce the - * permission string. Note that empty permissions don't prevent access to the owner and (possibly) a root - * administrator. - * - * @return - */ - public static ResourcePrivileges empty() { - return new ResourcePrivileges(null); - } - - /** - * Create a permission object from a string. - * - * @param permissions - * @return - */ - public static ResourcePrivileges create(String permissions) { - return new ResourcePrivileges(permissions); - } - - public boolean checkAuthorization(Scope scope) { - - // Only way to get a ServiceScope here is if the same service is requesting the resource. This also - // covers scope == null, which is OK if the resource is public. 
- if (isPublic || scope instanceof ServiceScope) { - return true; - } - - if (scope instanceof UserScope userScope) { - return checkAuthorization(userScope.getUser().getUsername(), userScope.getUser().getGroups()); - } - - return true; - } - - public boolean checkAuthorization(String username, Collection groups) { - boolean authorized = isPublic; - if (!authorized) { - authorized = allowedUsers.contains(username); - } - if (!authorized) { - for (var group : groups) { - if (allowedGroups.contains(group.getName())) { - authorized = true; - break; - } + } else { + if (token.startsWith("!")) { + this.excludedGroups.add(token.substring(1)); + } else { + this.allowedGroups.add(token); } + } } - - boolean prevented = false; - if (authorized) { - // check if prevented - prevented = excludedUsers.contains(username); - if (!prevented) { - for (var group : groups) { - if (excludedGroups.contains(group.getName())) { - prevented = true; - break; - } - } - } + } + } + } + + /** + * Create an empty permission object (to add to if wished). Its toString() method will produce the + * permission string. Note that empty permissions don't prevent access to the owner and (possibly) + * a root administrator. + * + * @return + */ + public static ResourcePrivileges empty() { + return new ResourcePrivileges(null); + } + + /** + * Create a permission object from a string. + * + * @param permissions + * @return + */ + public static ResourcePrivileges create(String permissions) { + return new ResourcePrivileges(permissions); + } + + public boolean checkAuthorization(Scope scope) { + + // Only way to get a ServiceScope here is if the same service is requesting the resource. This + // also + // covers scope == null, which is OK if the resource is public. + if (isPublic || scope instanceof ServiceScope) { + return true; + } + + if (scope instanceof UserScope userScope) { + return checkAuthorization(userScope.getUser().getUsername(), userScope.getUser().getGroups()); + } + + return true; + } + + public boolean checkAuthorization(String username, Collection groups) { + boolean authorized = isPublic; + if (!authorized) { + authorized = allowedUsers.contains(username); + } + if (!authorized) { + for (var group : groups) { + if (allowedGroups.contains(group.getName())) { + authorized = true; + break; } - - return authorized && !prevented; - } - - public void setAllowedGroups(Set allowedGroups) { - this.allowedGroups = allowedGroups; - } - - public void setExcludedGroups(Set excludedGroups) { - this.excludedGroups = excludedGroups; - } - - public void setAllowedUsers(Set allowedUsers) { - this.allowedUsers = allowedUsers; - } - - public void setExcludedUsers(Set excludedUsers) { - this.excludedUsers = excludedUsers; - } - - public boolean isPublic() { - return isPublic; - } - - public void setPublic(boolean isPublic) { - this.isPublic = isPublic; - } - - public Set getAllowedGroups() { - return allowedGroups; - } - - public Set getExcludedGroups() { - return excludedGroups; - } - - public Set getAllowedUsers() { - return allowedUsers; - } - - public Set getExcludedUsers() { - return excludedUsers; - } - - @Override - public String toString() { - return encode(); - } - - private String encode() { - StringBuffer buffer = new StringBuffer(256); - if (isPublic) - buffer.append("*"); - for (String group : allowedGroups) { - buffer.append(buffer.isEmpty() ? "" : ",").append(group); - } - for (String user : allowedUsers) { - buffer.append(buffer.isEmpty() ? 
"" : ",").append(user); - } - for (String group : excludedGroups) { - buffer.append(buffer.isEmpty() ? "" : ",").append("!").append(group); - } - for (String user : excludedUsers) { - buffer.append(buffer.isEmpty() ? "" : ",").append("!").append(user); + } + } + + boolean prevented = false; + if (authorized) { + // check if prevented + prevented = excludedUsers.contains(username); + if (!prevented) { + for (var group : groups) { + if (excludedGroups.contains(group.getName())) { + prevented = true; + break; + } } - return buffer.toString(); - } - - public Set getAllowedServices() { - return allowedServices; - } - - public void setAllowedServices(Set allowedServices) { - this.allowedServices = allowedServices; - } - - /** - * This is returned by the API and should only show the privileges that are not beyond the passed scope's - * pay grade. - * - * @param scope - * @return - */ - public ResourcePrivileges asSeenByScope(Scope scope) { - // TODO filter privileges to those visible by the scope - return this; - } + } + } + + return authorized && !prevented; + } + + public void setAllowedGroups(Set allowedGroups) { + this.allowedGroups = allowedGroups; + } + + public void setExcludedGroups(Set excludedGroups) { + this.excludedGroups = excludedGroups; + } + + public void setAllowedUsers(Set allowedUsers) { + this.allowedUsers = allowedUsers; + } + + public void setExcludedUsers(Set excludedUsers) { + this.excludedUsers = excludedUsers; + } + + public boolean isPublic() { + return isPublic; + } + + public void setPublic(boolean isPublic) { + this.isPublic = isPublic; + } + + public Set getAllowedGroups() { + return allowedGroups; + } + + public Set getExcludedGroups() { + return excludedGroups; + } + + public Set getAllowedUsers() { + return allowedUsers; + } + + public Set getExcludedUsers() { + return excludedUsers; + } + + @Override + public String toString() { + return encode(); + } + + private String encode() { + StringBuffer buffer = new StringBuffer(256); + if (isPublic) buffer.append("*"); + for (String group : allowedGroups) { + buffer.append(buffer.isEmpty() ? "" : ",").append(group); + } + for (String user : allowedUsers) { + buffer.append(buffer.isEmpty() ? "" : ",").append(user); + } + for (String group : excludedGroups) { + buffer.append(buffer.isEmpty() ? "" : ",").append("!").append(group); + } + for (String user : excludedUsers) { + buffer.append(buffer.isEmpty() ? "" : ",").append("!").append(user); + } + return buffer.toString(); + } + + public Set getAllowedServices() { + return allowedServices; + } + + public void setAllowedServices(Set allowedServices) { + this.allowedServices = allowedServices; + } + + /** + * This is returned by the API and should only show the privileges that are not beyond the passed + * scope's pay grade. + * + * @param scope + * @return + */ + public ResourcePrivileges asSeenByScope(Scope scope) { + // TODO filter privileges to those visible by the scope + return this; + } } diff --git a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/SemanticType.java b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/SemanticType.java index b92768e99..d5c7c5da5 100644 --- a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/SemanticType.java +++ b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/SemanticType.java @@ -11,544 +11,555 @@ * @author ferdinando.villa */ public enum SemanticType { - /** - * - */ - OBSERVABLE, - /** - * Predicates are traits, roles and domains. 
- */ - PREDICATE, - /** - * - */ - QUALITY, - /** - * - */ - PROCESS, - /** - * - */ - SUBJECT, - /** - * - */ - EVENT, - /** - * - */ - RELATIONSHIP, - /** - * - */ - EXTENSIVE, - /** - * - */ - INTENSIVE, - /** - * - */ - TRAIT, - /** - * - */ - IDENTITY, - /** - * - */ - ATTRIBUTE, - /** - * - */ - REALM, - /** - * - */ - SUBJECTIVE, - /** - * - */ - INTERNAL, - /** - * - */ - ROLE, - /** - * - */ - DENIABLE, - /** - * - */ - CONFIGURATION, - /** - * - */ - ABSTRACT, - /** - * - */ - NOTHING, - /** - * - */ - ORDERING, - /** - * - */ - CLASS, - /** - * - */ - QUANTITY, - /** - * - */ - DOMAIN, - /** - * - */ - ENERGY, - /** - * - */ - ENTROPY, - /** - * - */ - LENGTH, - /** - * - */ - MASS, - /** - * - */ - VOLUME, - /** - * - */ - WEIGHT, - /** - * - */ - MONEY, - /** - * - */ - DURATION, - /** - * - */ - AREA, - /** - * - */ - ACCELERATION, - /** - * - */ - PRIORITY, - /** - * - */ - ELECTRIC_POTENTIAL, - /** - * - */ - CHARGE, - /** - * - */ - RESISTANCE, - /** - * - */ - RESISTIVITY, - /** - * - */ - PRESSURE, - /** - * - */ - ANGLE, - /** - * - */ - VELOCITY, - /** - * - */ - TEMPERATURE, - /** - * - */ - VISCOSITY, - /** - * - */ - AGENT, - /** - * - */ - FUNCTIONAL, - /** - * - */ - STRUCTURAL, - /** - * - */ - BIDIRECTIONAL, - /** - * - */ - UNIDIRECTIONAL, - /** - * - */ - DELIBERATIVE, - /** - * - */ - INTERACTIVE, - /** - * - */ - REACTIVE, - /** - * - */ - DIRECT_OBSERVABLE, - /** - * - */ - COUNTABLE, - /** - * - */ - UNCERTAINTY, - /** - * - */ - PROBABILITY, - /** - * - */ - PROPORTION, - /** - * - */ - PERCENTAGE, - /** - * - */ - NUMEROSITY, - /** - * - */ - DISTANCE, - /** - * - */ - RATIO, - /** - * - */ - VALUE, - /** - * - */ - OCCURRENCE, - /** - * - */ - PRESENCE, - /** - * - */ - EXTENT, - /** - * - */ - MACRO, - /** - * - */ - AMOUNT, + /** */ + OBSERVABLE, + /** Predicates are traits, roles and domains. 
*/ + PREDICATE, + /** */ + QUALITY, + /** */ + PROCESS, + /** */ + SUBJECT, + /** */ + EVENT, + /** */ + RELATIONSHIP, + /** */ + EXTENSIVE, + /** */ + INTENSIVE, + /** */ + TRAIT, + /** */ + IDENTITY, + /** */ + ATTRIBUTE, + /** */ + REALM, + /** */ + SUBJECTIVE, + /** */ + INTERNAL, + /** */ + ROLE, + /** */ + DENIABLE, + /** */ + CONFIGURATION, + /** */ + ABSTRACT, + /** */ + NOTHING, + /** */ + ORDERING, + /** */ + CLASS, + /** */ + QUANTITY, + /** */ + DOMAIN, + /** */ + ENERGY, + /** */ + ENTROPY, + /** */ + LENGTH, + /** */ + MASS, + /** */ + VOLUME, + /** */ + WEIGHT, + /** */ + MONEY, + /** */ + DURATION, + /** */ + AREA, + /** */ + ACCELERATION, + /** */ + PRIORITY, + /** */ + ELECTRIC_POTENTIAL, + /** */ + CHARGE, + /** */ + RESISTANCE, + /** */ + RESISTIVITY, + /** */ + PRESSURE, + /** */ + ANGLE, + /** */ + VELOCITY, + /** */ + TEMPERATURE, + /** */ + VISCOSITY, + /** */ + AGENT, + /** */ + FUNCTIONAL, + /** */ + STRUCTURAL, + /** */ + BIDIRECTIONAL, + /** */ + UNIDIRECTIONAL, + /** */ + DELIBERATIVE, + /** */ + INTERACTIVE, + /** */ + REACTIVE, + /** */ + DIRECT_OBSERVABLE, + /** */ + COUNTABLE, + /** */ + UNCERTAINTY, + /** */ + PROBABILITY, + /** */ + PROPORTION, + /** */ + PERCENTAGE, + /** */ + NUMEROSITY, + /** */ + DISTANCE, + /** */ + RATIO, + /** */ + VALUE, + /** */ + OCCURRENCE, + /** */ + PRESENCE, + /** */ + EXTENT, + /** */ + MACRO, + /** */ + AMOUNT, - /** - * Only for concept peers of non-semantic types: this should never appear in a declared concept - */ - CATEGORY, - /** - * - */ - MAGNITUDE, - /** - * A quality that can be quantified numerically - */ - QUANTIFIABLE, - /** - * Reserved for unions built from declarations - */ - UNION, - /** - * Reserved for intersections built from declarations - */ - INTERSECTION, - /** - * Specifier for values; affects validation of currencies - */ - MONETARY_VALUE, - /** - * Makes an attribute a rescaling transformation, which does not preserve observation semantics - */ - RESCALING, - /** - * A process that defines the change of its inherent quality. - */ - CHANGE, - /** - * A quality that describes the speed of change of its inherent quality. - */ - RATE, - /** - * An event that results from a change of value in the inherent quality. - */ - CHANGED, - /** - * A supertype with sealed closure - */ - SEALED, - /** - * Concept that have the syntax of authority references (with the uppercase namespace) get this type even - * if not recognized by an online authority (in which case they won't have the IDENTITY type but will - * still have this, so that the syntactic validation won't fail). - */ - AUTHORITY_IDENTITY; + /** + * Only for concept peers of non-semantic types: this should never appear in a declared concept + */ + CATEGORY, + /** */ + MAGNITUDE, + /** A quality that can be quantified numerically */ + QUANTIFIABLE, + /** Reserved for unions built from declarations */ + UNION, + /** Reserved for intersections built from declarations */ + INTERSECTION, + /** Specifier for values; affects validation of currencies */ + MONETARY_VALUE, + /** + * Makes an attribute a rescaling transformation, which does not preserve observation semantics + */ + RESCALING, + /** A process that defines the change of its inherent quality. */ + CHANGE, + /** A quality that describes the speed of change of its inherent quality. */ + RATE, + /** An event that results from a change of value in the inherent quality. 
*/ + CHANGED, + /** A supertype with sealed closure */ + SEALED, + /** + * Concept that have the syntax of authority references (with the uppercase namespace) get this + * type even if not recognized by an online authority (in which case they won't have the IDENTITY + * type but will still have this, so that the syntactic validation won't fail). + */ + AUTHORITY_IDENTITY; - public boolean isNumeric() { - return CONTINUOUS_QUALITY_TYPES.contains(this); - } + public boolean isNumeric() { + return CONTINUOUS_QUALITY_TYPES.contains(this); + } - public boolean isQuality() { - return ALL_QUALITY_TYPES.contains(this); - } + public boolean isQuality() { + return ALL_QUALITY_TYPES.contains(this); + } - public boolean admitsUnits() { - return this == EXTENSIVE || this == INTENSIVE || this == NUMEROSITY; - } + public boolean admitsUnits() { + return this == EXTENSIVE || this == INTENSIVE || this == NUMEROSITY; + } - public boolean admitsCurrency() { - return this == MONETARY_VALUE; - } + public boolean admitsCurrency() { + return this == MONETARY_VALUE; + } - public boolean isSubstantial() { - return DIRECT_OBSERVABLE_TYPES.contains(this); - } + public boolean isSubstantial() { + return DIRECT_OBSERVABLE_TYPES.contains(this); + } - public static boolean isSubstantial(Set type) { - var set = EnumSet.copyOf(type); - set.retainAll(DIRECT_OBSERVABLE_TYPES); - return !set.isEmpty(); - } + public static boolean isSubstantial(Set type) { + var set = EnumSet.copyOf(type); + set.retainAll(DIRECT_OBSERVABLE_TYPES); + return !set.isEmpty(); + } - public boolean isPredicate() { - return this == ROLE || TRAIT_TYPES.contains(this); - } + public boolean isPredicate() { + return this == ROLE || TRAIT_TYPES.contains(this); + } - public boolean isTrait() { - return TRAIT_TYPES.contains(this); - } + public boolean isTrait() { + return TRAIT_TYPES.contains(this); + } - /** - * All declarable concept bits set. Each observable AND this must yield a set of size 1. - */ - public static final EnumSet FUNDAMENTAL_TYPES = EnumSet.of(SemanticType.QUALITY, - SemanticType.SUBJECT, SemanticType.AGENT, SemanticType.EVENT, SemanticType.CONFIGURATION, - SemanticType.DOMAIN, SemanticType.RELATIONSHIP, SemanticType.EXTENT, SemanticType.PROCESS, - SemanticType.ATTRIBUTE, SemanticType.REALM, SemanticType.IDENTITY, SemanticType.ROLE); + /** All declarable concept bits set. Each observable AND this must yield a set of size 1. */ + public static final EnumSet FUNDAMENTAL_TYPES = + EnumSet.of( + SemanticType.QUALITY, + SemanticType.SUBJECT, + SemanticType.AGENT, + SemanticType.EVENT, + SemanticType.CONFIGURATION, + SemanticType.DOMAIN, + SemanticType.RELATIONSHIP, + SemanticType.EXTENT, + SemanticType.PROCESS, + SemanticType.ATTRIBUTE, + SemanticType.REALM, + SemanticType.IDENTITY, + SemanticType.ROLE); - public static final EnumSet MODELABLE_TYPES = EnumSet.of(SemanticType.QUALITY, - SemanticType.SUBJECT, SemanticType.AGENT, SemanticType.EVENT, SemanticType.CONFIGURATION, - SemanticType.RELATIONSHIP, SemanticType.PROCESS, SemanticType.TRAIT, SemanticType.ROLE, - SemanticType.DOMAIN); + public static final EnumSet MODELABLE_TYPES = + EnumSet.of( + SemanticType.QUALITY, + SemanticType.SUBJECT, + SemanticType.AGENT, + SemanticType.EVENT, + SemanticType.CONFIGURATION, + SemanticType.RELATIONSHIP, + SemanticType.PROCESS, + SemanticType.TRAIT, + SemanticType.ROLE, + SemanticType.DOMAIN); - /** - * These need to be represented in the root domain of the ontology using core derivations. They are all - * disjoint and concrete. 
TODO at the moment the OPERATOR_TYPES are not directly linked to core types. - */ - public static final Set DECLARABLE_TYPES = EnumSet.of(SemanticType.PROPORTION, - SemanticType.PROBABILITY, SemanticType.DISTANCE, SemanticType.VALUE, SemanticType.OCCURRENCE, - SemanticType.PRESENCE, SemanticType.UNCERTAINTY, SemanticType.NUMEROSITY, SemanticType.RATE, - SemanticType.CLASS, SemanticType.QUANTITY, SemanticType.ENERGY, SemanticType.ENTROPY, - SemanticType.LENGTH, SemanticType.MASS, SemanticType.VOLUME, SemanticType.WEIGHT, - SemanticType.MONEY, SemanticType.DURATION, SemanticType.AREA, SemanticType.ACCELERATION, - SemanticType.PRIORITY, SemanticType.ELECTRIC_POTENTIAL, SemanticType.CHARGE, - SemanticType.IDENTITY, SemanticType.DOMAIN, - SemanticType.RESISTANCE, SemanticType.RESISTIVITY, SemanticType.PRESSURE, SemanticType.ANGLE, - SemanticType.VELOCITY, SemanticType.TEMPERATURE, SemanticType.VISCOSITY, SemanticType.RATIO, - SemanticType.AMOUNT, SemanticType.SUBJECT, SemanticType.AGENT, SemanticType.EVENT, - SemanticType.RELATIONSHIP, SemanticType.PROCESS, SemanticType.CONFIGURATION, SemanticType.ROLE, - SemanticType.ATTRIBUTE, SemanticType.REALM, SemanticType.ORDERING); + /** + * These need to be represented in the root domain of the ontology using core derivations. They + * are all disjoint and concrete. TODO at the moment the OPERATOR_TYPES are not directly linked to + * core types. + */ + public static final Set DECLARABLE_TYPES = + EnumSet.of( + SemanticType.PROPORTION, + SemanticType.PROBABILITY, + SemanticType.DISTANCE, + SemanticType.VALUE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.UNCERTAINTY, + SemanticType.NUMEROSITY, + SemanticType.RATE, + SemanticType.CLASS, + SemanticType.QUANTITY, + SemanticType.ENERGY, + SemanticType.ENTROPY, + SemanticType.LENGTH, + SemanticType.MASS, + SemanticType.VOLUME, + SemanticType.WEIGHT, + SemanticType.MONEY, + SemanticType.DURATION, + SemanticType.AREA, + SemanticType.ACCELERATION, + SemanticType.PRIORITY, + SemanticType.ELECTRIC_POTENTIAL, + SemanticType.CHARGE, + SemanticType.IDENTITY, + SemanticType.DOMAIN, + SemanticType.RESISTANCE, + SemanticType.RESISTIVITY, + SemanticType.PRESSURE, + SemanticType.ANGLE, + SemanticType.VELOCITY, + SemanticType.TEMPERATURE, + SemanticType.VISCOSITY, + SemanticType.RATIO, + SemanticType.AMOUNT, + SemanticType.SUBJECT, + SemanticType.AGENT, + SemanticType.EVENT, + SemanticType.RELATIONSHIP, + SemanticType.PROCESS, + SemanticType.CONFIGURATION, + SemanticType.ROLE, + SemanticType.ATTRIBUTE, + SemanticType.REALM, + SemanticType.ORDERING); - /** - * Qualities that are naturally inherent and should not be allowed to have explicit inherency but just - * context. - */ - public static final EnumSet INHERENT_QUALITIES = EnumSet.of(SemanticType.PROPORTION, - SemanticType.PROBABILITY, SemanticType.DISTANCE, SemanticType.VALUE, SemanticType.OCCURRENCE, - SemanticType.PRESENCE, SemanticType.UNCERTAINTY, SemanticType.NUMEROSITY, SemanticType.RATE); + /** + * Qualities that are naturally inherent and should not be allowed to have explicit inherency but + * just context. 
+ */ + public static final EnumSet INHERENT_QUALITIES = + EnumSet.of( + SemanticType.PROPORTION, + SemanticType.PROBABILITY, + SemanticType.DISTANCE, + SemanticType.VALUE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.UNCERTAINTY, + SemanticType.NUMEROSITY, + SemanticType.RATE); - public static final Set OPERATOR_TYPES = EnumSet.of(SemanticType.CHANGE, - SemanticType.NUMEROSITY, SemanticType.DISTANCE, /* FIXME MISSING: LEVEL? - no it's an ORDERING - with a described type */ SemanticType.MAGNITUDE, SemanticType.OCCURRENCE, SemanticType.PRESENCE - , SemanticType.PROBABILITY, SemanticType.PROPORTION, SemanticType.RATIO, SemanticType.CLASS, - SemanticType.UNCERTAINTY, SemanticType.VALUE, SemanticType.MONETARY_VALUE); + public static final Set OPERATOR_TYPES = + EnumSet.of( + SemanticType.CHANGE, + SemanticType.NUMEROSITY, + SemanticType.DISTANCE, /* FIXME MISSING: LEVEL? - no it's an ORDERING + with a described type */ + SemanticType.MAGNITUDE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.PROBABILITY, + SemanticType.PROPORTION, + SemanticType.RATIO, + SemanticType.CLASS, + SemanticType.UNCERTAINTY, + SemanticType.VALUE, + SemanticType.MONETARY_VALUE); - /** - * All quality type bits sets (not QUALITY itself). Each quality AND this must yield a set of size 1. - */ - public static final EnumSet QUALITY_TYPES = EnumSet.of(SemanticType.CLASS, - SemanticType.QUANTITY, SemanticType.ENERGY, SemanticType.ENTROPY, SemanticType.LENGTH, - SemanticType.MASS, SemanticType.VOLUME, SemanticType.WEIGHT, SemanticType.MONEY, - SemanticType.DURATION, SemanticType.AREA, SemanticType.ACCELERATION, SemanticType.PRIORITY, - SemanticType.ELECTRIC_POTENTIAL, SemanticType.CHARGE, SemanticType.RESISTANCE, - SemanticType.RESISTIVITY, SemanticType.PRESSURE, SemanticType.ANGLE, SemanticType.VELOCITY, - SemanticType.TEMPERATURE, SemanticType.VISCOSITY, SemanticType.UNCERTAINTY, SemanticType.RATIO, - SemanticType.PROPORTION, SemanticType.PROBABILITY, SemanticType.NUMEROSITY, - SemanticType.DISTANCE, SemanticType.VALUE, SemanticType.MONETARY_VALUE, SemanticType.OCCURRENCE - , SemanticType.PRESENCE, SemanticType.AMOUNT, SemanticType.RATE); + /** + * All quality type bits sets (not QUALITY itself). Each quality AND this must yield a set of size + * 1. + */ + public static final EnumSet QUALITY_TYPES = + EnumSet.of( + SemanticType.CLASS, + SemanticType.QUANTITY, + SemanticType.ENERGY, + SemanticType.ENTROPY, + SemanticType.LENGTH, + SemanticType.MASS, + SemanticType.VOLUME, + SemanticType.WEIGHT, + SemanticType.MONEY, + SemanticType.DURATION, + SemanticType.AREA, + SemanticType.ACCELERATION, + SemanticType.PRIORITY, + SemanticType.ELECTRIC_POTENTIAL, + SemanticType.CHARGE, + SemanticType.RESISTANCE, + SemanticType.RESISTIVITY, + SemanticType.PRESSURE, + SemanticType.ANGLE, + SemanticType.VELOCITY, + SemanticType.TEMPERATURE, + SemanticType.VISCOSITY, + SemanticType.UNCERTAINTY, + SemanticType.RATIO, + SemanticType.PROPORTION, + SemanticType.PROBABILITY, + SemanticType.NUMEROSITY, + SemanticType.DISTANCE, + SemanticType.VALUE, + SemanticType.MONETARY_VALUE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.AMOUNT, + SemanticType.RATE); - /** - * All quality type bits sets including QUALITY itself. Each quality AND this must yield a set of size 0. 
- */ - public static final EnumSet ALL_QUALITY_TYPES = EnumSet.of(SemanticType.CLASS, - SemanticType.QUALITY, SemanticType.QUANTITY, SemanticType.ENERGY, SemanticType.ENTROPY, - SemanticType.LENGTH, SemanticType.MASS, SemanticType.VOLUME, SemanticType.WEIGHT, - SemanticType.MONEY, SemanticType.DURATION, SemanticType.AREA, SemanticType.ACCELERATION, - SemanticType.PRIORITY, SemanticType.ELECTRIC_POTENTIAL, SemanticType.CHARGE, - SemanticType.RESISTANCE, SemanticType.RESISTIVITY, SemanticType.PRESSURE, SemanticType.ANGLE, - SemanticType.VELOCITY, SemanticType.TEMPERATURE, SemanticType.VISCOSITY, - SemanticType.UNCERTAINTY, SemanticType.RATIO, SemanticType.PROPORTION, SemanticType.PROBABILITY - , SemanticType.NUMEROSITY, SemanticType.DISTANCE, SemanticType.VALUE, SemanticType.OCCURRENCE, - SemanticType.PRESENCE, SemanticType.AMOUNT, SemanticType.RATE, SemanticType.MONETARY_VALUE); + /** + * All quality type bits sets including QUALITY itself. Each quality AND this must yield a set of + * size 0. + */ + public static final EnumSet ALL_QUALITY_TYPES = + EnumSet.of( + SemanticType.CLASS, + SemanticType.QUALITY, + SemanticType.QUANTITY, + SemanticType.ENERGY, + SemanticType.ENTROPY, + SemanticType.LENGTH, + SemanticType.MASS, + SemanticType.VOLUME, + SemanticType.WEIGHT, + SemanticType.MONEY, + SemanticType.DURATION, + SemanticType.AREA, + SemanticType.ACCELERATION, + SemanticType.PRIORITY, + SemanticType.ELECTRIC_POTENTIAL, + SemanticType.CHARGE, + SemanticType.RESISTANCE, + SemanticType.RESISTIVITY, + SemanticType.PRESSURE, + SemanticType.ANGLE, + SemanticType.VELOCITY, + SemanticType.TEMPERATURE, + SemanticType.VISCOSITY, + SemanticType.UNCERTAINTY, + SemanticType.RATIO, + SemanticType.PROPORTION, + SemanticType.PROBABILITY, + SemanticType.NUMEROSITY, + SemanticType.DISTANCE, + SemanticType.VALUE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.AMOUNT, + SemanticType.RATE, + SemanticType.MONETARY_VALUE); - /** - * All qualities that are expressed through a continuous numeric state. - */ - public static final EnumSet CONTINUOUS_QUALITY_TYPES = EnumSet.of(SemanticType.QUANTITY, - SemanticType.ENERGY, SemanticType.ENTROPY, SemanticType.LENGTH, SemanticType.MASS, - SemanticType.VOLUME, SemanticType.WEIGHT, SemanticType.MONEY, SemanticType.DURATION, - SemanticType.AREA, SemanticType.ACCELERATION, SemanticType.PRIORITY, - SemanticType.ELECTRIC_POTENTIAL, SemanticType.CHARGE, SemanticType.RESISTANCE, - SemanticType.RESISTIVITY, SemanticType.PRESSURE, SemanticType.ANGLE, SemanticType.VELOCITY, - SemanticType.TEMPERATURE, SemanticType.VISCOSITY, SemanticType.UNCERTAINTY, SemanticType.RATIO, - SemanticType.PROPORTION, SemanticType.PROBABILITY, SemanticType.NUMEROSITY, - SemanticType.DISTANCE, SemanticType.VALUE, SemanticType.OCCURRENCE, SemanticType.PRESENCE, - SemanticType.AMOUNT, SemanticType.MAGNITUDE, SemanticType.RATE, SemanticType.MONETARY_VALUE); + /** All qualities that are expressed through a continuous numeric state. 
*/ + public static final EnumSet CONTINUOUS_QUALITY_TYPES = + EnumSet.of( + SemanticType.QUANTITY, + SemanticType.ENERGY, + SemanticType.ENTROPY, + SemanticType.LENGTH, + SemanticType.MASS, + SemanticType.VOLUME, + SemanticType.WEIGHT, + SemanticType.MONEY, + SemanticType.DURATION, + SemanticType.AREA, + SemanticType.ACCELERATION, + SemanticType.PRIORITY, + SemanticType.ELECTRIC_POTENTIAL, + SemanticType.CHARGE, + SemanticType.RESISTANCE, + SemanticType.RESISTIVITY, + SemanticType.PRESSURE, + SemanticType.ANGLE, + SemanticType.VELOCITY, + SemanticType.TEMPERATURE, + SemanticType.VISCOSITY, + SemanticType.UNCERTAINTY, + SemanticType.RATIO, + SemanticType.PROPORTION, + SemanticType.PROBABILITY, + SemanticType.NUMEROSITY, + SemanticType.DISTANCE, + SemanticType.VALUE, + SemanticType.OCCURRENCE, + SemanticType.PRESENCE, + SemanticType.AMOUNT, + SemanticType.MAGNITUDE, + SemanticType.RATE, + SemanticType.MONETARY_VALUE); - /** - * All direct observables - */ - public final static EnumSet DIRECT_OBSERVABLE_TYPES = - EnumSet.of(SemanticType.DIRECT_OBSERVABLE, SemanticType.SUBJECT, SemanticType.AGENT, - SemanticType.EVENT, SemanticType.RELATIONSHIP, SemanticType.PROCESS, - SemanticType.CONFIGURATION, SemanticType.COUNTABLE, /* FIXME ??? */SemanticType.ABSTRACT); + /** All direct observables */ + public static final EnumSet DIRECT_OBSERVABLE_TYPES = + EnumSet.of( + SemanticType.DIRECT_OBSERVABLE, + SemanticType.SUBJECT, + SemanticType.AGENT, + SemanticType.EVENT, + SemanticType.RELATIONSHIP, + SemanticType.PROCESS, + SemanticType.CONFIGURATION, + SemanticType.COUNTABLE, /* FIXME ??? */ + SemanticType.ABSTRACT); - /** - * All base observables - */ - public final static EnumSet BASE_OBSERVABLE_TYPES = EnumSet.of(SemanticType.SUBJECT, - SemanticType.EVENT, SemanticType.RELATIONSHIP, SemanticType.PROCESS, SemanticType.QUALITY, - SemanticType.AGENT); + /** All base observables */ + public static final EnumSet BASE_OBSERVABLE_TYPES = + EnumSet.of( + SemanticType.SUBJECT, + SemanticType.EVENT, + SemanticType.RELATIONSHIP, + SemanticType.PROCESS, + SemanticType.QUALITY, + SemanticType.AGENT); - /** - * Everything we can write a model for - */ - public final static EnumSet BASE_MODELABLE_TYPES = EnumSet.of(SemanticType.SUBJECT, - SemanticType.EVENT, SemanticType.RELATIONSHIP, SemanticType.PROCESS, SemanticType.QUALITY, - SemanticType.AGENT, SemanticType.TRAIT, SemanticType.CONFIGURATION); + /** Everything we can write a model for */ + public static final EnumSet BASE_MODELABLE_TYPES = + EnumSet.of( + SemanticType.SUBJECT, + SemanticType.EVENT, + SemanticType.RELATIONSHIP, + SemanticType.PROCESS, + SemanticType.QUALITY, + SemanticType.AGENT, + SemanticType.TRAIT, + SemanticType.CONFIGURATION); - /** - * All trait type bits set (not TRAIT itself). Each trait AND this must yield a set of size 1. - */ - public static final EnumSet TRAIT_TYPES = EnumSet.of(SemanticType.ATTRIBUTE, - SemanticType.REALM, SemanticType.IDENTITY); + /** All trait type bits set (not TRAIT itself). Each trait AND this must yield a set of size 1. */ + public static final EnumSet TRAIT_TYPES = + EnumSet.of(SemanticType.ATTRIBUTE, SemanticType.REALM, SemanticType.IDENTITY); - /** - * All trait type bits set (including TRAIT itself). Each trait AND this must yield a set of size 1. - */ - public static final EnumSet ALL_TRAIT_TYPES = EnumSet.of(SemanticType.ATTRIBUTE, - SemanticType.REALM, SemanticType.IDENTITY, SemanticType.TRAIT); + /** + * All trait type bits set (including TRAIT itself). 
Each trait AND this must yield a set of size + * 1. + */ + public static final EnumSet ALL_TRAIT_TYPES = + EnumSet.of( + SemanticType.ATTRIBUTE, SemanticType.REALM, SemanticType.IDENTITY, SemanticType.TRAIT); - public static SemanticType fundamentalType(Collection types) { + public static SemanticType fundamentalType(Collection types) { - Set t = EnumSet.copyOf(types); - t.retainAll(FUNDAMENTAL_TYPES); - if (t.size() == 1) { - return t.iterator().next(); - } - t = EnumSet.copyOf(types); - t.retainAll(TRAIT_TYPES); - if (t.size() == 1) { - return t.iterator().next(); - } - if (types.contains(ROLE)) { - return ROLE; - } - return NOTHING; + Set t = EnumSet.copyOf(types); + t.retainAll(FUNDAMENTAL_TYPES); + if (t.size() == 1) { + return t.iterator().next(); } - - public static boolean isNumeric(Set semantics) { - EnumSet set = EnumSet.copyOf(semantics); - set.retainAll(CONTINUOUS_QUALITY_TYPES); - return !set.isEmpty(); + t = EnumSet.copyOf(types); + t.retainAll(TRAIT_TYPES); + if (t.size() == 1) { + return t.iterator().next(); } + if (types.contains(ROLE)) { + return ROLE; + } + return NOTHING; + } - static final Color CONCEPT_COLOR_UNKNOWN = new Color(255, 0, 0); - static final Color CONCEPT_COLOR_VOID = new Color(60, 60, 100); - static final Color CONCEPT_COLOR_QUALITY = new Color(0, 204, 0); - static final Color CONCEPT_COLOR_CONFIGURATION = new Color(0, 100, 100); - static final Color CONCEPT_COLOR_SUBJECT = new Color(153, 76, 0); - static final Color CONCEPT_COLOR_EVENT = new Color(153, 153, 0); - static final Color CONCEPT_COLOR_PROCESS = new Color(180, 0, 0); - static final Color CONCEPT_COLOR_RELATIONSHIP = new Color(210, 170, 0); - static final Color CONCEPT_COLOR_TRAIT = new Color(0, 102, 204); - static final Color CONCEPT_COLOR_ROLE = new Color(0, 86, 163); - static final Color CONCEPT_COLOR_EXTENT = new Color(0, 153, 153); - static final Color CONCEPT_COLOR_DOMAIN = new Color(220, 220, 103); + public static boolean isNumeric(Set semantics) { + EnumSet set = EnumSet.copyOf(semantics); + set.retainAll(CONTINUOUS_QUALITY_TYPES); + return !set.isEmpty(); + } - /** - * Source of truth for the UI color corresponding to different semantic categories. - * - * @param semantics - * @return - */ - public static Color getColor(Set semantics) { - Set fundamental = EnumSet.copyOf(semantics); - fundamental.retainAll(MODELABLE_TYPES); - Color ret = null; - if (fundamental.size() == 1) { - ret = switch (fundamental.iterator().next()) { - case SUBJECT, AGENT -> CONCEPT_COLOR_SUBJECT; - case EVENT -> CONCEPT_COLOR_EVENT; - case DOMAIN -> CONCEPT_COLOR_DOMAIN; - case RELATIONSHIP -> CONCEPT_COLOR_RELATIONSHIP; - case PROCESS -> CONCEPT_COLOR_PROCESS; - case QUALITY -> CONCEPT_COLOR_QUALITY; - case ROLE -> CONCEPT_COLOR_ROLE; - case CONFIGURATION -> CONCEPT_COLOR_CONFIGURATION; - case TRAIT -> CONCEPT_COLOR_TRAIT; - case EXTENT -> CONCEPT_COLOR_EXTENT; - default -> null; - }; - } - return ret == null ? 
CONCEPT_COLOR_UNKNOWN : ret; - } + static final Color CONCEPT_COLOR_UNKNOWN = new Color(255, 0, 0); + static final Color CONCEPT_COLOR_VOID = new Color(60, 60, 100); + static final Color CONCEPT_COLOR_QUALITY = new Color(0, 204, 0); + static final Color CONCEPT_COLOR_CONFIGURATION = new Color(0, 100, 100); + static final Color CONCEPT_COLOR_SUBJECT = new Color(153, 76, 0); + static final Color CONCEPT_COLOR_EVENT = new Color(153, 153, 0); + static final Color CONCEPT_COLOR_PROCESS = new Color(116, 0, 0); + static final Color CONCEPT_COLOR_RELATIONSHIP = new Color(210, 170, 0); + static final Color CONCEPT_COLOR_TRAIT = new Color(0, 102, 204); + static final Color CONCEPT_COLOR_ROLE = new Color(0, 86, 163); + static final Color CONCEPT_COLOR_EXTENT = new Color(0, 153, 153); + static final Color CONCEPT_COLOR_DOMAIN = new Color(220, 220, 103); + /** + * Source of truth for the UI color corresponding to different semantic categories. + * + * @param semantics + * @return + */ + public static Color getColor(Set semantics) { + Set fundamental = EnumSet.copyOf(semantics); + fundamental.retainAll(MODELABLE_TYPES); + Color ret = null; + if (fundamental.size() == 1) { + ret = + switch (fundamental.iterator().next()) { + case SUBJECT, AGENT -> CONCEPT_COLOR_SUBJECT; + case EVENT -> CONCEPT_COLOR_EVENT; + case DOMAIN -> CONCEPT_COLOR_DOMAIN; + case RELATIONSHIP -> CONCEPT_COLOR_RELATIONSHIP; + case PROCESS -> CONCEPT_COLOR_PROCESS; + case QUALITY -> CONCEPT_COLOR_QUALITY; + case ROLE -> CONCEPT_COLOR_ROLE; + case CONFIGURATION -> CONCEPT_COLOR_CONFIGURATION; + case TRAIT -> CONCEPT_COLOR_TRAIT; + case EXTENT -> CONCEPT_COLOR_EXTENT; + default -> null; + }; + } + return ret == null ? CONCEPT_COLOR_UNKNOWN : ret; + } } diff --git a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/Urn.java b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/Urn.java index 4d2ae7f5f..63af3c26a 100644 --- a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/Urn.java +++ b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/knowledge/Urn.java @@ -5,319 +5,300 @@ import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; - import org.integratedmodelling.klab.api.collections.Pair; import org.integratedmodelling.klab.api.data.Version; import org.integratedmodelling.klab.api.knowledge.organization.Project; /** - * Simple helper to decompose a URN into its constituents and access them with - * proper semantics. - * - * URN is formatted as :::[#params][@version] - * - * @author Ferd + * Simple helper to decompose a URN into its constituents and access them with proper semantics. + * + *

URN is formatted as :::[#params][@version] * + * @author Ferd */ public class Urn implements Serializable { - - final public static String SINGLE_PARAMETER_KEY = "value"; - - final public static Pattern URN_RESOURCE_PATTERN = Pattern.compile("[A-z]+:[A-z]+:[A-z]+:[A-z]+(#.+)?"); - final public static Pattern URN_KIM_OBJECT_PATTERN = Pattern.compile("[a-z]+(\\.[a-z]+)+"); - final public static Pattern URN_CONCEPT_PATTERN = Pattern.compile("[a-z]+:[A-Z]+"); - - private String urn; - private String fullUrn; - private String[] tokens; - private Map parameters = new HashMap<>(); - - public enum Type { - /** - * A resource URN - */ - RESOURCE, - /** - * A model, acknowledgement, namespace/scenario, project or define - */ - KIM_OBJECT, - /** - * A concept or observable (no guarantee that it's meaningful) - */ - OBSERVABLE, - /** - * An http-based URL, observable only when it points to a remote observation (no - * guarantee) - */ - REMOTE_URL, - /** - * Returned by classify() when the passed string cannot be understood as one of - * the above - */ - UNKNOWN - } - - /** - * Pass a valid URN string. For now does no validation. - * - * @param urn - */ - public Urn(String urn) { - fullUrn = urn; - if (urn.startsWith(KLAB_URN_PREFIX)) { - urn = urn.substring(KLAB_URN_PREFIX.length()); - } - if (urn.contains("#")) { - String[] uu = urn.split("#"); - urn = uu[0]; - for (String s : uu[1].split("&")) { - if (s.contains("=")) { - String[] kv = s.split("="); - parameters.put(kv[0], kv[1]); - } else { - if (parameters.containsKey(SINGLE_PARAMETER_KEY)) { - parameters.put(SINGLE_PARAMETER_KEY, parameters.get(SINGLE_PARAMETER_KEY) + "," + s); - } else { - parameters.put(SINGLE_PARAMETER_KEY, s); - } - } - } - } - this.urn = urn; - this.tokens = urn.split(":"); - } - - public Urn(String urn, Map urnParameters) { - this(urn); - if (urnParameters != null && !urnParameters.isEmpty()) { - this.parameters.putAll(urnParameters); - String s = ""; - for (String key : urnParameters.keySet()) { - s += (s.isEmpty() ? "" : "&") + ("value".equals(key) ? "" : (key + "=")); - String val = urnParameters.get(key); - s += val.replace(",", "&"); - } - this.fullUrn += "#" + s; - } - } - - /** - * Node name, mandatory in all URNs. In universal ones it will be "klab". In - * local ones, it will be "local". - * - * @return the node name. - */ - public String getNodeName() { - return tokens[0]; - } - - /** - * Whether the URN should be processed by the same engine that generates it. - * - * @return true if local - */ - public boolean isLocal() { - return getNodeName().equals("local"); - } - - /** - * Return either an empty array for no parameter present, or an array of values - * with one or more values for the passed parameter set in the url as - * independent parts. E.g. url#a&b&C would return a, b, C. - * - * @param parameter - * @return - */ - public String[] getSplitParameter(String parameter) { - if (parameters.containsKey(parameter)) { - String ss = parameters.get(parameter); - if (ss == null) { - ss = ""; - } - return ss.split(","); - } - return new String[] {}; - } - - /** - * Whether the URN can be processed by any node. In this case, the URN has no - * attached data and the catalog name is the ID of the adapter that will process - * it. If we don't have the adapter, we will choose a node among those that do, - * using the load factor or some other criterion. - * - * @return true if universal. - */ - public boolean isUniversal() { - return getNodeName().equals("klab"); - } - - /** - * Return the catalog for the resource. 
In local resources, this is the - * originator ID. In universal resources, this is the adapter ID. Never null. - * - * @return the originator - */ - public String getCatalog() { - return tokens[1]; - } - - /** - * Return the namespace of the resource. - */ - public String getNamespace() { - return tokens.length > 2 ? tokens[2] : null; - } - - /** - * Return the resource ID. Never null. - * - * @return the resource id. - */ - public String getResourceId() { - return tokens.length > 3 ? tokens[3] : null; - } - - /** - * Return the version, if any. - * - * @return - */ - public Version getVersion() { - return tokens.length > 4 ? new Version(tokens[4]) : null; - } - - /** - * Unmodified URN string without parameters - * - * @return the unmodified URN. - */ - public String getUrn() { - return urn; - } - - @Override - public String toString() { - return fullUrn; - } - - public Map getParameters() { - return parameters; - } - - public static Type classify(String urn) { - - if (urn.startsWith("http") && urn.contains("//:")) { - return Type.REMOTE_URL; - } else if (URN_RESOURCE_PATTERN.matcher(urn).find()) { - return Type.RESOURCE; - } else if (URN_KIM_OBJECT_PATTERN.matcher(urn).find()) { - return Type.KIM_OBJECT; - } else if (URN_CONCEPT_PATTERN.matcher(urn).find()) { - return Type.OBSERVABLE; - } - - return Type.UNKNOWN; - } - - final public static String KLAB_URN_PREFIX = "urn:klab:"; - final public static String LOCAL_URN_PREFIX = "urn:klab:local:"; - final public static String VOID_URN_PREFIX = "urn:klab:void:"; - final public static String LOCAL_FILE_PREFIX = "file:"; - - public static boolean isLocal(String urn) { - return urn.startsWith(LOCAL_URN_PREFIX) || urn.startsWith("local:") || urn.startsWith(LOCAL_FILE_PREFIX); - } - - public static boolean isUniversal(String urn) { - return urn.startsWith(KLAB_URN_PREFIX) || urn.startsWith("klab:"); - } - - public String getLocalUrn(String resourceId, Project project, String owner) { - return "local:" + owner + ":" + project.getUrn() + ":" + resourceId; - } - - /** - * Create a new local URN with the passed project instead of the original. - * - * @param urn - * @param projectName - * @return - */ - public static String changeLocalProject(String urn, String projectName) { - - if (!isLocal(urn)) { - throw new IllegalArgumentException("cannot change project name in non-local URN " + urn); - } - int fieldIndex = urn.startsWith(LOCAL_URN_PREFIX) ? 4 : 2; - String ret = ""; - int i = 0; - for (String field : urn.split(":")) { - ret += (ret.isEmpty() ? "" : ":") + (i == fieldIndex ? projectName : field); - i++; - } - return ret; - } - - public static Map parseParameters(String uu) { - Map ret = new HashMap<>(); - for (String s : uu.split("&")) { - if (s.contains("=")) { - String[] kv = s.split("="); - ret.put(kv[0], kv[1]); - } else { - ret.put(Urn.SINGLE_PARAMETER_KEY, s); - } - } - return ret; - } - - /** - * Split off the fragment and return the parsed parameter map along with the - * clean URN. 
- * - * @param urn - * @return - */ - public static Pair> resolveParameters(String urn) { - Map parameters = new HashMap<>(); - String clean = urn; - if (urn.contains("#")) { - String[] uu = urn.split("#"); - clean = uu[0]; - for (String s : uu[1].split("&")) { - if (s.contains("=")) { - String[] kv = s.split("="); - parameters.put(kv[0], kv[1]); - } else { - parameters.put(Urn.SINGLE_PARAMETER_KEY, s); - } - } - } - return Pair.of(clean, parameters); - } - - public boolean isUrn(String urn) { - // at least two colons in successive positions with something in the middle - int ln = urn.indexOf(':'); - return ln > 0 && urn.lastIndexOf(':') > (ln + 1); - } - - public static String applyParameters(String urn, Map urnParameters) { - String ret = removeParameters(urn); - if (urnParameters != null && !urnParameters.isEmpty()) { - boolean first = true; - for (Entry entry : urnParameters.entrySet()) { - ret += (first ? "#" : "&") + entry.getKey() + "=" + entry.getValue(); - } - } - return ret; - } - - public static String removeParameters(String urn) { - int pound = urn.indexOf(':'); - return pound > 0 ? urn.substring(0, pound) : urn; - } - -} \ No newline at end of file + public static final String SINGLE_PARAMETER_KEY = "value"; + + public static final Pattern URN_RESOURCE_PATTERN = + Pattern.compile("[A-z]+:[A-z]+:[A-z]+:[A-z]+(#.+)?"); + public static final Pattern URN_KIM_OBJECT_PATTERN = Pattern.compile("[a-z]+(\\.[a-z]+)+"); + public static final Pattern URN_CONCEPT_PATTERN = Pattern.compile("[a-z]+:[A-Z]+"); + + private String urn; + private String fullUrn; + private String[] tokens; + private Map parameters = new HashMap<>(); + + public enum Type { + /** A resource URN */ + RESOURCE, + /** A model, acknowledgement, namespace/scenario, project or define */ + KIM_OBJECT, + /** A concept or observable (no guarantee that it's meaningful) */ + OBSERVABLE, + /** An http-based URL, observable only when it points to a remote observation (no guarantee) */ + REMOTE_URL, + /** Returned by classify() when the passed string cannot be understood as one of the above */ + UNKNOWN + } + + /** + * Pass a valid URN string. For now does no validation. + * + * @param urn + */ + public Urn(String urn) { + fullUrn = urn; + if (urn.startsWith(KLAB_URN_PREFIX)) { + urn = urn.substring(KLAB_URN_PREFIX.length()); + } + if (urn.contains("#")) { + String[] uu = urn.split("#"); + urn = uu[0]; + for (String s : uu[1].split("&")) { + if (s.contains("=")) { + String[] kv = s.split("="); + parameters.put(kv[0], kv[1]); + } else { + if (parameters.containsKey(SINGLE_PARAMETER_KEY)) { + parameters.put(SINGLE_PARAMETER_KEY, parameters.get(SINGLE_PARAMETER_KEY) + "," + s); + } else { + parameters.put(SINGLE_PARAMETER_KEY, s); + } + } + } + } + this.urn = urn; + this.tokens = urn.split(":"); + } + + public Urn(String urn, Map urnParameters) { + this(urn); + if (urnParameters != null && !urnParameters.isEmpty()) { + this.parameters.putAll(urnParameters); + String s = ""; + for (String key : urnParameters.keySet()) { + s += (s.isEmpty() ? "" : "&") + ("value".equals(key) ? "" : (key + "=")); + String val = urnParameters.get(key); + s += val.replace(",", "&"); + } + this.fullUrn += "#" + s; + } + } + + /** + * Node name, mandatory in all URNs. In universal ones it will be "klab". In local ones, it will + * be "local". + * + * @return the node name. + */ + public String getNodeName() { + return tokens[0]; + } + + /** + * Whether the URN should be processed by the same engine that generates it. 
+ * + * @return true if local + */ + public boolean isLocal() { + return getNodeName().equals("local"); + } + + /** + * Return either an empty array for no parameter present, or an array of values with one or more + * values for the passed parameter set in the url as independent parts. E.g. url#a&b&C would + * return a, b, C. + * + * @param parameter + * @return + */ + public String[] getSplitParameter(String parameter) { + if (parameters.containsKey(parameter)) { + String ss = parameters.get(parameter); + if (ss == null) { + ss = ""; + } + return ss.split(","); + } + return new String[] {}; + } + + /** + * Whether the URN can be processed by any node. In this case, the URN has no attached data and + * the catalog name is the ID of the adapter that will process it. If we don't have the adapter, + * we will choose a node among those that do, using the load factor or some other criterion. + * + * @return true if universal. + */ + public boolean isUniversal() { + return getNodeName().equals("klab"); + } + + /** + * Return the catalog for the resource. In local resources, this is the originator ID. In + * universal resources, this is the adapter ID. Never null. + * + * @return the originator + */ + public String getCatalog() { + return tokens[1]; + } + + /** Return the namespace of the resource. */ + public String getNamespace() { + return tokens.length > 2 ? tokens[2] : null; + } + + /** + * Return the resource ID. Never null. + * + * @return the resource id. + */ + public String getResourceId() { + return tokens.length > 3 ? tokens[3] : null; + } + + /** + * Return the version, if any. + * + * @return + */ + public Version getVersion() { + return tokens.length > 4 ? new Version(tokens[4]) : null; + } + + /** + * Unmodified URN string without parameters + * + * @return the unmodified URN. + */ + public String getUrn() { + return urn; + } + + @Override + public String toString() { + return fullUrn; + } + + public Map getParameters() { + return parameters; + } + + public static Type classify(String urn) { + + if (urn.startsWith("http") && urn.contains("//:")) { + return Type.REMOTE_URL; + } else if (URN_RESOURCE_PATTERN.matcher(urn).find()) { + return Type.RESOURCE; + } else if (URN_KIM_OBJECT_PATTERN.matcher(urn).find()) { + return Type.KIM_OBJECT; + } else if (URN_CONCEPT_PATTERN.matcher(urn).find()) { + return Type.OBSERVABLE; + } + + return Type.UNKNOWN; + } + + public static final String KLAB_URN_PREFIX = "urn:klab:"; + public static final String LOCAL_URN_PREFIX = "urn:klab:local:"; + public static final String VOID_URN_PREFIX = "urn:klab:void:"; + public static final String LOCAL_FILE_PREFIX = "file:"; + + public static boolean isLocal(String urn) { + return urn.startsWith(LOCAL_URN_PREFIX) + || urn.startsWith("local:") + || urn.startsWith(LOCAL_FILE_PREFIX); + } + + public static boolean isUniversal(String urn) { + return urn.startsWith(KLAB_URN_PREFIX) || urn.startsWith("klab:"); + } + + public String getLocalUrn(String resourceId, Project project, String owner) { + return "local:" + owner + ":" + project.getUrn() + ":" + resourceId; + } + + /** + * Create a new local URN with the passed project instead of the original. + * + * @param urn + * @param projectName + * @return + */ + public static String changeLocalProject(String urn, String projectName) { + + if (!isLocal(urn)) { + throw new IllegalArgumentException("cannot change project name in non-local URN " + urn); + } + int fieldIndex = urn.startsWith(LOCAL_URN_PREFIX) ? 
4 : 2; + String ret = ""; + int i = 0; + for (String field : urn.split(":")) { + ret += (ret.isEmpty() ? "" : ":") + (i == fieldIndex ? projectName : field); + i++; + } + return ret; + } + + public static Map parseParameters(String uu) { + Map ret = new HashMap<>(); + for (String s : uu.split("&")) { + if (s.contains("=")) { + String[] kv = s.split("="); + ret.put(kv[0], kv[1]); + } else { + ret.put(Urn.SINGLE_PARAMETER_KEY, s); + } + } + return ret; + } + + /** + * Split off the fragment and return the parsed parameter map along with the clean URN. + * + * @param urn + * @return + */ + public static Pair> resolveParameters(String urn) { + Map parameters = new HashMap<>(); + String clean = urn; + if (urn.contains("#")) { + String[] uu = urn.split("#"); + clean = uu[0]; + for (String s : uu[1].split("&")) { + if (s.contains("=")) { + String[] kv = s.split("="); + parameters.put(kv[0], kv[1]); + } else { + parameters.put(Urn.SINGLE_PARAMETER_KEY, s); + } + } + } + return Pair.of(clean, parameters); + } + + public boolean isUrn(String urn) { + // at least two colons in successive positions with something in the middle + int ln = urn.indexOf(':'); + return ln > 0 && urn.lastIndexOf(':') > (ln + 1); + } + + public static String applyParameters(String urn, Map urnParameters) { + String ret = removeParameters(urn); + if (urnParameters != null && !urnParameters.isEmpty()) { + boolean first = true; + for (Entry entry : urnParameters.entrySet()) { + ret += (first ? "#" : "&") + entry.getKey() + "=" + entry.getValue(); + } + } + return ret; + } + + public static String removeParameters(String urn) { + int pound = urn.indexOf(':'); + return pound > 0 ? urn.substring(0, pound) : urn; + } +} diff --git a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/view/modeler/Modeler.java b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/view/modeler/Modeler.java index fc7c2b2a9..e242e94fa 100644 --- a/klab.core.api/src/main/java/org/integratedmodelling/klab/api/view/modeler/Modeler.java +++ b/klab.core.api/src/main/java/org/integratedmodelling/klab/api/view/modeler/Modeler.java @@ -3,6 +3,7 @@ import org.integratedmodelling.klab.api.data.RepositoryState; import org.integratedmodelling.klab.api.knowledge.organization.ProjectStorage; import org.integratedmodelling.klab.api.scope.ContextScope; +import org.integratedmodelling.klab.api.scope.Scope; import org.integratedmodelling.klab.api.scope.SessionScope; import org.integratedmodelling.klab.api.services.KlabService; import org.integratedmodelling.klab.api.view.UIController; @@ -129,6 +130,13 @@ private Option(Class... payloadClass) { */ void setCurrentService(KlabService service); + /** + * Return the innermost current scope available. + * + * @return + */ + Scope getCurrentScope(); + /** * Make the passed context the current one. The context must belong to the current session or an exception * will be thrown. 
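Reviewer note on the reformatted SemanticType helpers above: the set constants encode the invariant stated in their javadoc (intersecting a declared type set with FUNDAMENTAL_TYPES must leave exactly one element), and the static helpers are thin wrappers around those intersections. The sketch below is not part of the patch: the class wrapper is invented, the import path is assumed, and the generic type parameters (which the patch text renders without angle brackets) are written out explicitly. Expected outputs follow directly from the set definitions above.

import java.util.EnumSet;
import org.integratedmodelling.klab.api.knowledge.SemanticType; // package assumed, not shown in this hunk

public class SemanticTypeSketch {
  public static void main(String[] args) {
    // Type bits as they might appear on an extensive length quality.
    EnumSet<SemanticType> types =
        EnumSet.of(SemanticType.QUALITY, SemanticType.LENGTH, SemanticType.EXTENSIVE);

    // Intersecting with FUNDAMENTAL_TYPES leaves exactly one bit, per the invariant above.
    System.out.println(SemanticType.fundamentalType(types)); // QUALITY

    // LENGTH is in CONTINUOUS_QUALITY_TYPES, so the set is numeric; EXTENSIVE is what admits units.
    System.out.println(SemanticType.isNumeric(types));        // true
    System.out.println(SemanticType.EXTENSIVE.admitsUnits()); // true
    System.out.println(SemanticType.isSubstantial(types));    // false: qualities are not substantials

    // The UI color is keyed off the single modelable bit (the quality green in this case).
    var color = SemanticType.getColor(types);
  }
}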
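A similar sketch for the rewritten Urn helper above: the accessors index into the colon-separated tokens (node, catalog, namespace, resource id, optional version), and anything after '#' becomes the parameter map. The URN strings below are invented for illustration; the commented values follow the parsing code in the hunk.

import org.integratedmodelling.klab.api.knowledge.Urn;

public class UrnSketch {
  public static void main(String[] args) {
    // node:catalog:namespace:resourceId, plus an optional #key=value[&...] fragment
    Urn urn = new Urn("klab:osm:infrastructure:roads#buffer=100");

    System.out.println(urn.getNodeName());   // "klab" -> universal URN, the catalog is the adapter ID
    System.out.println(urn.isUniversal());   // true
    System.out.println(urn.getCatalog());    // "osm"
    System.out.println(urn.getNamespace());  // "infrastructure"
    System.out.println(urn.getResourceId()); // "roads"
    System.out.println(urn.getParameters()); // {buffer=100}
    System.out.println(urn.getUrn());        // "klab:osm:infrastructure:roads" (fragment stripped)
    System.out.println(urn);                 // toString() keeps the full form, parameters included

    // Classification without building an instance
    System.out.println(Urn.classify("klab:osm:infrastructure:roads")); // RESOURCE
    System.out.println(Urn.classify("geography:Elevation"));           // OBSERVABLE
  }
}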
diff --git a/klab.core.common/src/main/java/org/integratedmodelling/common/lang/kim/KimConceptImpl.java b/klab.core.common/src/main/java/org/integratedmodelling/common/lang/kim/KimConceptImpl.java index 7553ee8aa..eb27ccdda 100644 --- a/klab.core.common/src/main/java/org/integratedmodelling/common/lang/kim/KimConceptImpl.java +++ b/klab.core.common/src/main/java/org/integratedmodelling/common/lang/kim/KimConceptImpl.java @@ -12,775 +12,781 @@ public class KimConceptImpl extends KimStatementImpl implements KimConcept { - @Serial - private static final long serialVersionUID = 8531431719010407385L; - - private SemanticRole semanticRole; - private String name; - private Set type = EnumSet.noneOf(SemanticType.class); - private KimConcept observable; - private KimConcept parent; - private KimConcept inherent; - private KimConcept goal; - private KimConcept causant; - private KimConcept caused; - private KimConcept compresent; - private KimConcept comparisonConcept; - private String authorityTerm; - private String authority; - private UnarySemanticOperator semanticModifier; - private KimConcept relationshipSource; - private KimConcept relationshipTarget; - private List traits = new ArrayList<>(); - private List roles = new ArrayList<>(); - private boolean negated; - private String urn; - private List operands = new ArrayList<>(); - private Expression expressionType; - private SemanticType fundamentalType; - private KimConcept cooccurrent; - private KimConcept adjacent; - private String codeName; - private KimConcept temporalInherent; - private boolean collective; - private boolean pattern; - private Set patternVariables = new HashSet<>(); - - public Set getArgumentType() { - return argumentType; - } - - public void setArgumentType(Set argumentType) { - this.argumentType = argumentType; + @Serial private static final long serialVersionUID = 8531431719010407385L; + + private SemanticRole semanticRole; + private String name; + private Set type = EnumSet.noneOf(SemanticType.class); + private KimConcept observable; + private KimConcept parent; + private KimConcept inherent; + private KimConcept goal; + private KimConcept causant; + private KimConcept caused; + private KimConcept compresent; + private KimConcept comparisonConcept; + private String authorityTerm; + private String authority; + private UnarySemanticOperator semanticModifier; + private KimConcept relationshipSource; + private KimConcept relationshipTarget; + private List traits = new ArrayList<>(); + private List roles = new ArrayList<>(); + private boolean negated; + private String urn; + private List operands = new ArrayList<>(); + private Expression expressionType; + private SemanticType fundamentalType; + private KimConcept cooccurrent; + private KimConcept adjacent; + private String codeName; + private KimConcept temporalInherent; + private boolean collective; + private boolean pattern; + private Set patternVariables = new HashSet<>(); + + public Set getArgumentType() { + return argumentType; + } + + public void setArgumentType(Set argumentType) { + this.argumentType = argumentType; + } + + public KimConceptImpl() {} + + private transient Set argumentType = EnumSet.noneOf(SemanticType.class); + + private KimConceptImpl(KimConceptImpl other) { + super(other); + this.semanticRole = other.semanticRole; + this.name = other.name; + this.type = EnumSet.copyOf(other.type); + this.observable = other.observable; + this.parent = other.parent; + this.inherent = other.inherent; + this.goal = other.goal; + this.causant = other.causant; + 
this.caused = other.caused; + this.compresent = other.compresent; + this.comparisonConcept = other.comparisonConcept; + this.authorityTerm = other.authority; + this.authority = other.authority; + this.semanticModifier = other.semanticModifier; + this.collective = other.collective; + this.relationshipSource = other.relationshipSource; + this.relationshipTarget = other.relationshipTarget; + this.traits.addAll(other.traits); + this.roles.addAll(other.roles); + this.negated = other.negated; + this.urn = other.urn; + this.operands.addAll(other.operands); + this.expressionType = other.expressionType; + this.fundamentalType = other.fundamentalType; + this.cooccurrent = other.cooccurrent; + this.adjacent = other.adjacent; + this.codeName = other.codeName; + this.temporalInherent = other.temporalInherent; + this.argumentType = EnumSet.copyOf(other.argumentType); + } + + @Override + public String getName() { + return this.name; + } + + @Override + public Set getType() { + return this.type; + } + + @Override + public KimConcept getObservable() { + return this.observable; + } + + @Override + public KimConcept getInherent() { + return this.inherent; + } + + @Override + public KimConcept getGoal() { + return this.goal; + } + + @Override + public KimConcept getCausant() { + return this.causant; + } + + @Override + public KimConcept getCaused() { + return this.caused; + } + + @Override + public KimConcept getCompresent() { + return this.compresent; + } + + @Override + public KimConcept getComparisonConcept() { + return this.comparisonConcept; + } + + @Override + public String getAuthorityTerm() { + return this.authorityTerm; + } + + @Override + public String getAuthority() { + return this.authority; + } + + @Override + public UnarySemanticOperator getSemanticModifier() { + return this.semanticModifier; + } + + @Override + public KimConcept getRelationshipSource() { + return this.relationshipSource; + } + + @Override + public KimConcept getRelationshipTarget() { + return this.relationshipTarget; + } + + @Override + public List getTraits() { + return this.traits; + } + + @Override + public List getRoles() { + return this.roles; + } + + @Override + public boolean isNegated() { + return this.negated; + } + + @Override + public String getUrn() { + return this.urn; + } + + @Override + public boolean is(SemanticType type) { + return this.type.contains(type); + } + + @Override + public List getOperands() { + return this.operands; + } + + @Override + public Expression getExpressionType() { + return this.expressionType; + } + + @Override + public SemanticType getFundamentalType() { + return this.fundamentalType; + } + + @Override + public KimConcept getCooccurrent() { + return this.cooccurrent; + } + + @Override + public KimConcept getAdjacent() { + return this.adjacent; + } + + @Override + public String getCodeName() { + return this.codeName; + } + + @Override + public SemanticRole getSemanticRole() { + return this.semanticRole; + } + + @Override + public KimConcept getTemporalInherent() { + return this.temporalInherent; + } + + public void setSemanticRole(SemanticRole semanticRole) { + this.semanticRole = semanticRole; + } + + public void setName(String name) { + this.name = name; + } + + public void setType(Set type) { + this.type = type; + } + + public void setObservable(KimConcept observable) { + this.observable = observable; + } + + public void setInherent(KimConcept inherent) { + this.inherent = inherent; + } + + public void setGoal(KimConcept motivation) { + this.goal = motivation; + } + + public void 
setCausant(KimConcept causant) { + this.causant = causant; + } + + public void setCaused(KimConcept caused) { + this.caused = caused; + } + + public void setCompresent(KimConcept compresent) { + this.compresent = compresent; + } + + public void setComparisonConcept(KimConcept comparisonConcept) { + this.comparisonConcept = comparisonConcept; + } + + public void setAuthorityTerm(String authorityTerm) { + this.authorityTerm = authorityTerm; + } + + public void setAuthority(String authority) { + this.authority = authority; + } + + public void setSemanticModifier(UnarySemanticOperator semanticModifier) { + this.semanticModifier = semanticModifier; + } + + public void setRelationshipSource(KimConcept relationshipSource) { + this.relationshipSource = relationshipSource; + } + + public void setRelationshipTarget(KimConcept relationshipTarget) { + this.relationshipTarget = relationshipTarget; + } + + public void setTraits(List traits) { + this.traits = traits; + } + + public void setRoles(List roles) { + this.roles = roles; + } + + // public void setTemplate(boolean template) { + // this.template = template; + // } + + public void setNegated(boolean negated) { + this.negated = negated; + } + + public void setUrn(String urn) { + this.urn = urn; + ; + } + + public void setOperands(List operands) { + this.operands = operands; + } + + public void setExpressionType(Expression expressionType) { + this.expressionType = expressionType; + } + + public void setFundamentalType(SemanticType fundamentalType) { + this.fundamentalType = fundamentalType; + } + + public void setCooccurrent(KimConcept cooccurrent) { + this.cooccurrent = cooccurrent; + } + + public void setAdjacent(KimConcept adjacent) { + this.adjacent = adjacent; + } + + public void setCodeName(String codeName) { + this.codeName = codeName; + } + + public void setTemporalInherent(KimConcept temporalInherent) { + this.temporalInherent = temporalInherent; + } + + @Override + public KimConcept getParent() { + return parent; + } + + public void setParent(KimConcept parent) { + this.parent = parent; + } + + @Override + public String toString() { + return this.urn; + } + + /* + * modification methods + */ + + public KimConcept removeOperator() { + KimConceptImpl ret = new KimConceptImpl(this); + if (this.semanticModifier != null) { + ret.semanticModifier = null; + ret.comparisonConcept = null; + ret.type = this.argumentType; + ret.urn = ret.computeUrn(); + } + return ret; + } + + public KimConcept removeComponents(SemanticRole... 
roles) { + + KimConceptImpl ret = new KimConceptImpl(this); + + for (SemanticRole role : roles) { + + switch (role) { + case ADJACENT: + ret.adjacent = null; + break; + case CAUSANT: + ret.causant = null; + break; + case CAUSED: + ret.caused = null; + break; + case COMPRESENT: + ret.compresent = null; + break; + // case CONTEXT: + // ret.context = null; + // break; + case COOCCURRENT: + ret.cooccurrent = null; + break; + case GOAL: + ret.goal = null; + break; + case INHERENT: + ret.inherent = null; + break; + case ROLE: + ret.roles.clear(); + break; + case TRAIT: + ret.traits.clear(); + break; + case TEMPORAL_INHERENT: + ret.temporalInherent = null; + break; + case UNARY_OPERATOR: + ((KimConceptImpl) ret.observable).semanticModifier = null; + break; + default: + break; + } + } + + this.urn = ret.urn = computeUrn(); + + return ret; + } + + public KimConcept removeComponents(List declarations, List roles) { + + KimConceptImpl ret = new KimConceptImpl(this); + + for (int i = 0; i < declarations.size(); i++) { + + String declaration = declarations.get(i); + SemanticRole role = roles.get(i); + + switch (role) { + case ADJACENT: + ret.adjacent = null; + break; + case CAUSANT: + ret.causant = null; + break; + case CAUSED: + ret.caused = null; + break; + case COMPRESENT: + ret.compresent = null; + break; + // case CONTEXT: + // ret.context = null; + // break; + case COOCCURRENT: + ret.cooccurrent = null; + break; + case GOAL: + ret.goal = null; + break; + case INHERENT: + ret.inherent = null; + break; + case TEMPORAL_INHERENT: + ret.temporalInherent = null; + break; + case ROLE: + ret.roles = copyWithout(ret.roles, declaration); + break; + case TRAIT: + ret.traits = copyWithout(ret.traits, declaration); + break; + default: + break; + } + } + + this.urn = computeUrn(); + + return ret; + } + + private static List copyWithout(List concepts, String declaration) { + List ret = new ArrayList<>(); + for (KimConcept c : concepts) { + if (!c.toString().equals(declaration)) { + ret.add(c); + } + } + return ret; + } + + /** Create a text declaration that can be parsed back into a concept. */ + public String computeUrn() { + + String ret = isCollective() ? "each" : ""; + boolean complex = false; + + if (type.contains(SemanticType.NOTHING)) { + return "owl:Nothing"; + } + + if (semanticModifier != null) { + ret += (ret.isEmpty() ? "" : " ") + semanticModifier.declaration[0]; + complex = true; + } + + if (negated) { + ret += (ret.isEmpty() ? 
"" : " ") + "not"; + complex = true; } - public KimConceptImpl() { - } - - transient private Set argumentType = EnumSet.noneOf(SemanticType.class); - - private KimConceptImpl(KimConceptImpl other) { - super(other); - this.semanticRole = other.semanticRole; - this.name = other.name; - this.type = EnumSet.copyOf(other.type); - this.observable = other.observable; - this.parent = other.parent; - this.inherent = other.inherent; - this.goal = other.goal; - this.causant = other.causant; - this.caused = other.caused; - this.compresent = other.compresent; - this.comparisonConcept = other.comparisonConcept; - this.authorityTerm = other.authority; - this.authority = other.authority; - this.semanticModifier = other.semanticModifier; - this.collective = other.collective; - this.relationshipSource = other.relationshipSource; - this.relationshipTarget = other.relationshipTarget; - this.traits.addAll(other.traits); - this.roles.addAll(other.roles); - this.negated = other.negated; - this.urn = other.urn; - this.operands.addAll(other.operands); - this.expressionType = other.expressionType; - this.fundamentalType = other.fundamentalType; - this.cooccurrent = other.cooccurrent; - this.adjacent = other.adjacent; - this.codeName = other.codeName; - this.temporalInherent = other.temporalInherent; - this.argumentType = EnumSet.copyOf(other.argumentType); - } - - @Override - public String getName() { - return this.name; - } - - @Override - public Set getType() { - return this.type; - } - - @Override - public KimConcept getObservable() { - return this.observable; - } - - @Override - public KimConcept getInherent() { - return this.inherent; - } - - @Override - public KimConcept getGoal() { - return this.goal; - } - - @Override - public KimConcept getCausant() { - return this.causant; - } - - @Override - public KimConcept getCaused() { - return this.caused; - } - - @Override - public KimConcept getCompresent() { - return this.compresent; - } - - @Override - public KimConcept getComparisonConcept() { - return this.comparisonConcept; - } - - @Override - public String getAuthorityTerm() { - return this.authorityTerm; - } - - @Override - public String getAuthority() { - return this.authority; - } - - @Override - public UnarySemanticOperator getSemanticModifier() { - return this.semanticModifier; - } - - @Override - public KimConcept getRelationshipSource() { - return this.relationshipSource; - } - - @Override - public KimConcept getRelationshipTarget() { - return this.relationshipTarget; - } - - @Override - public List getTraits() { - return this.traits; - } - - @Override - public List getRoles() { - return this.roles; - } - - @Override - public boolean isNegated() { - return this.negated; - } - - @Override - public String getUrn() { - return this.urn; - } - - @Override - public boolean is(SemanticType type) { - return this.type.contains(type); - } - - @Override - public List getOperands() { - return this.operands; - } - - @Override - public Expression getExpressionType() { - return this.expressionType; - } - - @Override - public SemanticType getFundamentalType() { - return this.fundamentalType; - } - - @Override - public KimConcept getCooccurrent() { - return this.cooccurrent; - } + StringBuilder concepts = new StringBuilder(); + boolean ccomplex = false; - @Override - public KimConcept getAdjacent() { - return this.adjacent; + for (KimConcept trait : traits) { + concepts + .append((concepts.isEmpty()) ? 
"" : " ") + .append(parenthesize(((KimConceptImpl) trait).computeUrn())); + ccomplex = true; } - @Override - public String getCodeName() { - return this.codeName; + for (KimConcept role : roles) { + concepts + .append((concepts.isEmpty()) ? "" : " ") + .append(parenthesize(((KimConceptImpl) role).computeUrn())); + ccomplex = true; } - @Override - public SemanticRole getSemanticRole() { - return this.semanticRole; - } + // for (KimConcept conc : unclassified) { + // concepts += (concepts.isEmpty() ? "" : " ") + conc; + // ccomplex = true; + // } + + concepts + .append((concepts.isEmpty()) ? "" : " ") + .append(name == null ? ((KimConceptImpl) observable).computeUrn() : name); + var needsParentheses = ccomplex && !ret.equals("each"); + ret += + (ret.isEmpty() ? "" : " ") + + (needsParentheses ? "(" : "") + + concepts + + (needsParentheses ? ")" : ""); - @Override - public KimConcept getTemporalInherent() { - return this.temporalInherent; + if (comparisonConcept != null) { + ret += + " " + + semanticModifier.declaration[1] + + " " + + ((KimConceptImpl) comparisonConcept).computeUrn(); + complex = true; } - public void setSemanticRole(SemanticRole semanticRole) { - this.semanticRole = semanticRole; - } + // if (authority != null) { + // ret += " identified as " + stringify(authorityTerm) + " by " + authority; + // complex = true; + // } - public void setName(String name) { - this.name = name; + if (inherent != null) { + ret += " of " + ((KimConceptImpl) inherent).computeUrn(); + complex = true; } - public void setType(Set type) { - this.type = type; - } + // if (context != null) { + // ret += " within " + ((KimConceptImpl) context).computeUrn(); + // complex = true; + // } - public void setObservable(KimConcept observable) { - this.observable = observable; + if (causant != null) { + ret += " caused by " + ((KimConceptImpl) causant).computeUrn(); + complex = true; } - public void setInherent(KimConcept inherent) { - this.inherent = inherent; + if (caused != null) { + ret += " causing " + ((KimConceptImpl) caused).computeUrn(); + complex = true; } - public void setGoal(KimConcept motivation) { - this.goal = motivation; + if (compresent != null) { + ret += " with " + ((KimConceptImpl) compresent).computeUrn(); + complex = true; } - public void setCausant(KimConcept causant) { - this.causant = causant; + if (cooccurrent != null) { + ret += " during " + ((KimConceptImpl) cooccurrent).computeUrn(); + complex = true; } - public void setCaused(KimConcept caused) { - this.caused = caused; + if (temporalInherent != null) { + ret += " during each " + ((KimConceptImpl) temporalInherent).computeUrn(); + complex = true; } - public void setCompresent(KimConcept compresent) { - this.compresent = compresent; + if (adjacent != null) { + ret += " adjacent to " + ((KimConceptImpl) adjacent).computeUrn(); + complex = true; } - public void setComparisonConcept(KimConcept comparisonConcept) { - this.comparisonConcept = comparisonConcept; + if (goal != null) { + ret += " for " + ((KimConceptImpl) goal).computeUrn(); + complex = true; } - public void setAuthorityTerm(String authorityTerm) { - this.authorityTerm = authorityTerm; + if (relationshipSource != null) { + ret += " linking " + ((KimConceptImpl) relationshipSource).computeUrn(); + if (relationshipTarget != null) { + ret += " to " + ((KimConceptImpl) relationshipSource).computeUrn(); + } + complex = true; } - public void setAuthority(String authority) { - this.authority = authority; - } + boolean expression = false; + for (KimConcept operand : operands) { + ret 
+= " " + (expressionType == Expression.INTERSECTION ? "and" : "or") + " " + operand; + complex = true; + expression = true; + } + + return (expression /* ccomplex || complex */) ? parenthesize(ret) : ret; + } - public void setSemanticModifier(UnarySemanticOperator semanticModifier) { - this.semanticModifier = semanticModifier; - } + /** + * Add parentheses around a declaration unless it is already enclosed in parentheses or it is a + * single concept. + * + * @param ret + * @return + */ + private static String parenthesize(String ret) { + ret = ret.trim(); + boolean enclosed = ret.startsWith("(") && ret.endsWith(")"); + boolean trivial = !ret.trim().contains(" "); + return (enclosed || trivial) ? ret : ("(" + ret + ")"); + } - public void setRelationshipSource(KimConcept relationshipSource) { - this.relationshipSource = relationshipSource; - } + private String stringify(String term) { - public void setRelationshipTarget(KimConcept relationshipTarget) { - this.relationshipTarget = relationshipTarget; + if (term.startsWith("\"")) { + return term; } - public void setTraits(List traits) { - this.traits = traits; - } + boolean ws = false; - public void setRoles(List roles) { - this.roles = roles; + // stringify anything that's not a lowercase ID + for (int i = 0; i < term.length(); i++) { + if (Character.isWhitespace(term.charAt(i)) + || !(Character.isLetter(term.charAt(i)) + || Character.isDigit(term.charAt(i)) + || term.charAt(i) == '_')) { + ws = true; + break; + } } - // public void setTemplate(boolean template) { - // this.template = template; - // } + // TODO should escape any internal double quotes, unlikely + return ws ? ("\"" + term + "\"") : term; + } - public void setNegated(boolean negated) { - this.negated = negated; - } + @Override + public boolean isCollective() { + return collective; + } - public void setUrn(String urn) { - this.urn = urn; - ; - } + public void setCollective(boolean collective) { + this.collective = collective; + } - public void setOperands(List operands) { - this.operands = operands; - } + @Override + public int hashCode() { + return Objects.hash(urn); + } - public void setExpressionType(Expression expressionType) { - this.expressionType = expressionType; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + KimConceptImpl other = (KimConceptImpl) obj; + return Objects.equals(urn, other.urn); + } - public void setFundamentalType(SemanticType fundamentalType) { - this.fundamentalType = fundamentalType; - } + /** + * Call after making modifications to finalize the concept and update the URN + * + *

TODO check abstract state as well + */ + public void finalizeDefinition() { + this.urn = computeUrn(); + } - public void setCooccurrent(KimConcept cooccurrent) { - this.cooccurrent = cooccurrent; - } + public static KimConcept nothing() { + var ret = new KimConceptImpl(); + ret.setName("Nothing"); + ret.setNamespace("klab"); + ret.setType(EnumSet.of(SemanticType.NOTHING)); + return ret; + } - public void setAdjacent(KimConcept adjacent) { - this.adjacent = adjacent; - } + @Override + public boolean isPattern() { + return pattern; + } - public void setCodeName(String codeName) { - this.codeName = codeName; - } + public void setPattern(boolean pattern) { + this.pattern = pattern; + } - public void setTemporalInherent(KimConcept temporalInherent) { - this.temporalInherent = temporalInherent; - } + @Override + public Set getPatternVariables() { + return patternVariables; + } - @Override - public KimConcept getParent() { - return parent; - } + public void setPatternVariables(Set patternVariables) { + this.patternVariables = patternVariables; + } - public void setParent(KimConcept parent) { - this.parent = parent; - } + @Override + public void visit(Visitor visitor) { - @Override - public String toString() { - return this.urn; + if (observable != null) { + observable.visit(visitor); } - /* - * modification methods - */ - - public KimConcept removeOperator() { - KimConceptImpl ret = new KimConceptImpl(this); - if (this.semanticModifier != null) { - ret.semanticModifier = null; - ret.comparisonConcept = null; - ret.type = this.argumentType; - ret.urn = ret.computeUrn(); - } - return ret; + if (authority != null) { + // visitor.visitAuthority(authority, authorityTerm); } - public KimConcept removeComponents(SemanticRole... roles) { - - KimConceptImpl ret = new KimConceptImpl(this); - - for (SemanticRole role : roles) { - - switch (role) { - case ADJACENT: - ret.adjacent = null; - break; - case CAUSANT: - ret.causant = null; - break; - case CAUSED: - ret.caused = null; - break; - case COMPRESENT: - ret.compresent = null; - break; - // case CONTEXT: - // ret.context = null; - // break; - case COOCCURRENT: - ret.cooccurrent = null; - break; - case GOAL: - ret.goal = null; - break; - case INHERENT: - ret.inherent = null; - break; - case ROLE: - ret.roles.clear(); - break; - case TRAIT: - ret.traits.clear(); - break; - case TEMPORAL_INHERENT: - ret.temporalInherent = null; - break; - case UNARY_OPERATOR: - ((KimConceptImpl) ret.observable).semanticModifier = null; - break; - default: - break; - } - } - - this.urn = ret.urn = computeUrn(); - - return ret; - } - - public KimConcept removeComponents(List declarations, List roles) { - - KimConceptImpl ret = new KimConceptImpl(this); - - for (int i = 0; i < declarations.size(); i++) { - - String declaration = declarations.get(i); - SemanticRole role = roles.get(i); - - switch (role) { - case ADJACENT: - ret.adjacent = null; - break; - case CAUSANT: - ret.causant = null; - break; - case CAUSED: - ret.caused = null; - break; - case COMPRESENT: - ret.compresent = null; - break; - // case CONTEXT: - // ret.context = null; - // break; - case COOCCURRENT: - ret.cooccurrent = null; - break; - case GOAL: - ret.goal = null; - break; - case INHERENT: - ret.inherent = null; - break; - case TEMPORAL_INHERENT: - ret.temporalInherent = null; - break; - case ROLE: - ret.roles = copyWithout(ret.roles, declaration); - break; - case TRAIT: - ret.traits = copyWithout(ret.traits, declaration); - break; - default: - break; - } - } - - this.urn = computeUrn(); - - return ret; - } 
- - private static List copyWithout(List concepts, String declaration) { - List ret = new ArrayList<>(); - for (KimConcept c : concepts) { - if (!c.toString().equals(declaration)) { - ret.add(c); - } - } - return ret; - } - - /** - * Create a text declaration that can be parsed back into a concept. - */ - public String computeUrn() { - - String ret = isCollective() ? "each" : ""; - boolean complex = false; - - if (type.contains(SemanticType.NOTHING)) { - return "owl:Nothing"; - } - - if (semanticModifier != null) { - ret += (ret.isEmpty() ? "" : " ") + semanticModifier.declaration[0]; - complex = true; - } - - if (negated) { - ret += (ret.isEmpty() ? "" : " ") + "not"; - complex = true; - } - - StringBuilder concepts = new StringBuilder(); - boolean ccomplex = false; - - for (KimConcept trait : traits) { - concepts.append((concepts.isEmpty()) ? "" : " ").append( - parenthesize(((KimConceptImpl) trait).computeUrn())); - ccomplex = true; - } - - for (KimConcept role : roles) { - concepts.append((concepts.isEmpty()) ? "" : " ").append( - parenthesize(((KimConceptImpl) role).computeUrn())); - ccomplex = true; - } - - // for (KimConcept conc : unclassified) { - // concepts += (concepts.isEmpty() ? "" : " ") + conc; - // ccomplex = true; - // } - - concepts.append((concepts.isEmpty()) ? "" : " ").append( - name == null ? ((KimConceptImpl) observable).computeUrn() : name); - var needsParentheses = ccomplex && !ret.equals("each"); - ret += (ret.isEmpty() ? "" : " ") + (needsParentheses ? "(" : "") + concepts + (needsParentheses ? - ")" : ""); - - if (comparisonConcept != null) { - ret += " " + semanticModifier.declaration[1] + " " + ((KimConceptImpl) comparisonConcept).computeUrn(); - complex = true; - } - - // if (authority != null) { - // ret += " identified as " + stringify(authorityTerm) + " by " + authority; - // complex = true; - // } - - if (inherent != null) { - ret += " of " + ((KimConceptImpl) inherent).computeUrn(); - complex = true; - } - - // if (context != null) { - // ret += " within " + ((KimConceptImpl) context).computeUrn(); - // complex = true; - // } - - if (causant != null) { - ret += " caused by " + ((KimConceptImpl) causant).computeUrn(); - complex = true; - } - - if (caused != null) { - ret += " causing " + ((KimConceptImpl) caused).computeUrn(); - complex = true; - } - - if (compresent != null) { - ret += " with " + ((KimConceptImpl) compresent).computeUrn(); - complex = true; - } - - if (cooccurrent != null) { - ret += " during " + ((KimConceptImpl) cooccurrent).computeUrn(); - complex = true; - } - - if (temporalInherent != null) { - ret += " during each " + ((KimConceptImpl) temporalInherent).computeUrn(); - complex = true; - } - - if (adjacent != null) { - ret += " adjacent to " + ((KimConceptImpl) adjacent).computeUrn(); - complex = true; - } - - if (goal != null) { - ret += " for " + ((KimConceptImpl) goal).computeUrn(); - complex = true; - } - - if (relationshipSource != null) { - ret += " linking " + ((KimConceptImpl) relationshipSource).computeUrn(); - if (relationshipTarget != null) { - ret += " to " + ((KimConceptImpl) relationshipSource).computeUrn(); - } - complex = true; - } - - boolean expression = false; - for (KimConcept operand : operands) { - ret += " " + (expressionType == Expression.INTERSECTION ? "and" : "or") + " " + operand; - complex = true; - expression = true; - } - - return (expression /* ccomplex || complex */) ? 
parenthesize(ret) : ret; + for (KimConcept trait : traits) { + trait.visit(visitor); } - /** - * Add parentheses around a declaration unless it is already enclosed in parentheses or it is a single - * concept. - * - * @param ret - * @return - */ - private static String parenthesize(String ret) { - ret = ret.trim(); - boolean enclosed = ret.startsWith("(") && ret.endsWith(")"); - boolean trivial = !ret.trim().contains(" "); - return (enclosed || trivial) ? ret : ("(" + ret + ")"); + for (KimConcept role : roles) { + role.visit(visitor); } - private String stringify(String term) { - - if (term.startsWith("\"")) { - return term; - } - - boolean ws = false; - - // stringify anything that's not a lowercase ID - for (int i = 0; i < term.length(); i++) { - if (Character.isWhitespace(term.charAt(i)) || !(Character.isLetter( - term.charAt(i)) || Character.isDigit(term.charAt(i)) || term.charAt(i) == '_')) { - ws = true; - break; - } - } - - // TODO should escape any internal double quotes, unlikely - return ws ? ("\"" + term + "\"") : term; - } - - @Override - public boolean isCollective() { - return collective; + if (inherent != null) { + inherent.visit(visitor); } - public void setCollective(boolean collective) { - this.collective = collective; + if (causant != null) { + causant.visit(visitor); } - @Override - public int hashCode() { - return Objects.hash(urn); + if (caused != null) { + caused.visit(visitor); } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - KimConceptImpl other = (KimConceptImpl) obj; - return Objects.equals(urn, other.urn); + if (compresent != null) { + compresent.visit(visitor); } - /** - * Call after making modifications to finalize the concept and update the URN - *

- * TODO check abstract state as well - */ - public void finalizeDefinition() { - this.urn = computeUrn(); + if (cooccurrent != null) { + cooccurrent.visit(visitor); } - - public static KimConcept nothing() { - var ret = new KimConceptImpl(); - ret.setName("Nothing"); - ret.setNamespace("klab"); - ret.setType(EnumSet.of(SemanticType.NOTHING)); - return ret; + if (adjacent != null) { + adjacent.visit(visitor); } - @Override - public boolean isPattern() { - return pattern; + if (temporalInherent != null) { + temporalInherent.visit(visitor); } - public void setPattern(boolean pattern) { - this.pattern = pattern; + if (goal != null) { + goal.visit(visitor); } - @Override - public Set getPatternVariables() { - return patternVariables; + if (relationshipSource != null) { + relationshipSource.visit(visitor); } - public void setPatternVariables(Set patternVariables) { - this.patternVariables = patternVariables; + if (relationshipTarget != null) { + relationshipTarget.visit(visitor); } - @Override - public void visit(Visitor visitor) { - - if (observable != null) { - observable.visit(visitor); - } - - if (authority != null) { - // visitor.visitAuthority(authority, authorityTerm); - } - - for (KimConcept trait : traits) { - trait.visit(visitor); - } - - for (KimConcept role : roles) { - role.visit(visitor); - } - - if (inherent != null) { - inherent.visit(visitor); - } - - if (causant != null) { - causant.visit(visitor); - } - - if (caused != null) { - caused.visit(visitor); - } - - if (compresent != null) { - compresent.visit(visitor); - } - - if (cooccurrent != null) { - cooccurrent.visit(visitor); - } - - if (adjacent != null) { - adjacent.visit(visitor); - } - - if (temporalInherent != null) { - temporalInherent.visit(visitor); - } - - if (goal != null) { - goal.visit(visitor); - } - - if (relationshipSource != null) { - relationshipSource.visit(visitor); - } - - if (relationshipTarget != null) { - relationshipTarget.visit(visitor); - } - - if (comparisonConcept != null) { - comparisonConcept.visit(visitor); - } - + if (comparisonConcept != null) { + comparisonConcept.visit(visitor); } + } - @Override - public Triple semanticOperation() { - if (semanticModifier != null) { - return Triple.of(semanticModifier, observable, comparisonConcept); - } - return null; + @Override + public Triple semanticOperation() { + if (semanticModifier != null) { + return Triple.of(semanticModifier, observable, comparisonConcept); } + return null; + } - @Override - public KimConcept semanticClause(SemanticClause semanticClause) { - return switch (semanticClause) { - case OF -> inherent; - case FOR -> goal; - case WITH -> compresent; - case CAUSED_BY -> causant; - case ADJACENT_TO -> adjacent; - case CAUSING -> caused; - case DURING -> temporalInherent; - case LINKING -> relationshipSource; - case TO -> relationshipTarget; - }; - } + @Override + public KimConcept semanticClause(SemanticClause semanticClause) { + return switch (semanticClause) { + case OF -> inherent; + case FOR -> goal; + case WITH -> compresent; + case CAUSED_BY -> causant; + case ADJACENT_TO -> adjacent; + case CAUSING -> caused; + case DURING -> temporalInherent; + case LINKING -> relationshipSource; + case TO -> relationshipTarget; + }; + } } diff --git a/klab.modeler/src/main/java/org/integratedmodelling/klab/modeler/ModelerImpl.java b/klab.modeler/src/main/java/org/integratedmodelling/klab/modeler/ModelerImpl.java index a7395e981..ef71543f0 100644 --- a/klab.modeler/src/main/java/org/integratedmodelling/klab/modeler/ModelerImpl.java +++ 
b/klab.modeler/src/main/java/org/integratedmodelling/klab/modeler/ModelerImpl.java @@ -1,6 +1,5 @@ package org.integratedmodelling.klab.modeler; -import com.jcraft.jsch.Session; import org.integratedmodelling.common.authentication.scope.AbstractReactiveScopeImpl; import org.integratedmodelling.common.authentication.scope.AbstractServiceDelegatingScope; import org.integratedmodelling.common.services.client.engine.EngineImpl; @@ -55,517 +54,597 @@ import java.util.Map; /** - * A {@link UIController} specialized to provide and orchestrate the views and panels that compose the - * k.Modeler application. Uses an {@link EngineImpl} which will connect to local services if available. Also - * handles one or more users and keeps a catalog of sessions and contexts, tagging the "current" one in focus - * in the UI. - *

- * Call {@link #boot()} in a separate thread when the view is initialized and let the UI events do the rest. + * A {@link UIController} specialized to provide and orchestrate the views and panels that compose + * the k.Modeler application. Uses an {@link EngineImpl} which will connect to local services if + * available. Also handles one or more users and keeps a catalog of sessions and contexts, tagging + * the "current" one in focus in the UI. + * + *

Call {@link #boot()} in a separate thread when the view is initialized and let the UI events + * do the rest. */ public class ModelerImpl extends AbstractUIController implements Modeler, PropertyHolder { - private ContextScope currentContext; - private SessionScope currentSession; - private List sessions = new ArrayList<>(); - private MultiValueMap contexts = new LinkedMultiValueMap<>(); - - EngineConfiguration workbench; - File workbenchDefinition; - private Map serviceUrls = new HashMap<>(); - private Geometry focalGeometry = Geometry.EMPTY; - private int contextCount = 0; - private int sessionCount = 0; - - public ModelerImpl() { - super(); - // read the workbench config - this.workbenchDefinition = Configuration.INSTANCE.getFileWithTemplate("modeler/workbench.yaml", - Utils.YAML.asString(new EngineConfiguration())); - this.workbench = Utils.YAML.load(workbenchDefinition, EngineConfiguration.class); - } - - public ModelerImpl(UI ui) { - super(ui); - // TODO read the workbench config - NAH this probably pertains to the IDE - } - - @Override - public void dispatch(UIReactor sender, UIEvent event, Object... payload) { - - // intercept some messages for bookkeeping - if (event == UIEvent.EngineStatusChanged) { - - Engine.Status status = (Engine.Status) payload[0]; - - for (var capabilities : status.getServicesCapabilities().values()) { - - if (capabilities == null) { - continue; - } - - if (capabilities.getUrl() != null) { - serviceUrls.put(capabilities.getServiceId(), capabilities.getUrl()); - } - if (capabilities.getBrokerURI() != null && scope() instanceof AbstractReactiveScopeImpl serviceClient) { - /* - * Instrument the service client for messaging. This is pretty involved alas, but the - * whole - * matter isn't exactly trivial. - */ - var client = serviceClient.getService(capabilities.getServiceId()); - if (client != null && client.serviceScope() instanceof AbstractServiceDelegatingScope delegatingScope && delegatingScope.getDelegateChannel() instanceof MessagingChannel messagingChannel) { - /* - * If the scope delegates to a messaging channel, set up messaging and link the - * available service queues to service message dispatchers. - */ - if (!messagingChannel.isConnected()) { - messagingChannel.connectToService(capabilities, - (UserIdentity) user().getIdentity(), - (message) -> dispatchServerMessage(capabilities, message)); - } - } - } - } - } + private ContextScope currentContext; + private SessionScope currentSession; + private List sessions = new ArrayList<>(); + private MultiValueMap contexts = new LinkedMultiValueMap<>(); - super.dispatch(sender, event, payload); - } - - private void dispatchServerMessage(KlabService.ServiceCapabilities capabilities, Message message) { - // TODO do things - System.out.println("SERVER MESSAGE FROM " + capabilities.getType() + " " + capabilities.getServiceId() + ": " + message); - } - - @Override - public Engine createEngine() { - // TODO first should locate and set the distribution - return new EngineImpl(); - } - - @Override - protected void createView() { - - /* - pre-built view controllers. View implementations will self-register upon creation. 
- */ - registerViewController(new ServicesViewControllerImpl(this)); - registerViewController(new DistributionViewImplController(this)); - registerViewController(new ResourcesNavigatorControllerImpl(this)); - registerViewController(new ContextInspectorControllerImpl(this)); - registerViewController(new AuthenticationViewControllerImpl(this)); - registerViewController(new ContextControllerImpl(this)); - registerViewController(new KnowledgeInspectorControllerImpl(this)); - // TODO etc. - - /* - panel classes - */ - registerPanelControllerClass(DocumentEditorControllerImpl.class); - } + EngineConfiguration workbench; + File workbenchDefinition; + private Map serviceUrls = new HashMap<>(); + private Geometry focalGeometry = Geometry.EMPTY; + private int contextCount = 0; + private int sessionCount = 0; - @Override - public void switchWorkbenchService(UIReactor requestingReactor, KlabService.ServiceCapabilities service) { - // TODO - super.switchWorkbenchService(requestingReactor, service); - } - - @Override - public void switchWorkbench(UIReactor requestingReactor, NavigableContainer container) { - if (getUI() != null) { - // we assume that the workspace is mainly intended to show documents and focus on assets. - // Switching the focal container changes all that, so we first clean everything. - getUI().cleanWorkspace(); - } - super.switchWorkbench(requestingReactor, container); - } + public ModelerImpl() { + super(); + // read the workbench config + this.workbenchDefinition = + Configuration.INSTANCE.getFileWithTemplate( + "modeler/workbench.yaml", Utils.YAML.asString(new EngineConfiguration())); + this.workbench = Utils.YAML.load(workbenchDefinition, EngineConfiguration.class); + } - @Override - public void configureWorkbench(UIReactor requestingReactor, NavigableDocument document, boolean shown) { - // TODO - super.configureWorkbench(requestingReactor, document, shown); - } - - @Override - public void setOption(Option option, Object... payload) { - // TODO validate option - // TODO react - } + public ModelerImpl(UI ui) { + super(ui); + // TODO read the workbench config - NAH this probably pertains to the IDE + } - @Override - public void observe(Object asset, boolean adding) { + @Override + public void dispatch(UIReactor sender, UIEvent event, Object... 
payload) { - if (currentUser() == null) { - throw new KlabAuthorizationException("Cannot make observations with an invalid user"); - } + // intercept some messages for bookkeeping + if (event == UIEvent.EngineStatusChanged) { - /** - * - * Use cases: - * - * Admitted with a current context or focal scale - * - * Concept (from ontology, knowledge explorer/inspector or define) - * Promote to observable (if countable becomes collective) - * Observable (from define or knowledge inspector) - * Observe as expected - * Model (from namespace or search) - * Observe as expected - * Resource from catalog (local or remote) - * Observe with non-semantic observable - * Observation from define (can be inline, a URN#ID, other) - * If adding==true, any existing context is preserved and added to - * If adding==false, a new context is created and any previous goes out of focus - * Observation from context tree - * Just sets the target for the next observations - * - * ALL can be either an object or a URN or DOI from inside or outside - * - * Admitted w/o a current context or focal scale - * - * Observation from define (can be inline, a URN#ID, other) - * - * If there is no session, must create a default session & select it - * If there is no context, must create a default empty context within the session & select it - */ - - if (currentSession == null) { - currentSession = openNewSession("S" + (++sessionCount)); - } + Engine.Status status = (Engine.Status) payload[0]; - if (currentContext == null && currentSession != null) { - currentContext = openNewContext("C" + (++contextCount)); - } + for (var capabilities : status.getServicesCapabilities().values()) { - if (currentContext == null) { - user().error("cannot create an observation context: aborting", UI.Interactivity.DISPLAY); - return; + if (capabilities == null) { + continue; } - List resolvables = new ArrayList<>(); - List constraints = new ArrayList<>(); - boolean isObserver = false; - - /** - * Assets are observed by URN unless they're models or observation definitions - */ - if (asset instanceof NavigableKlabStatement navigableAsset) { - asset = navigableAsset.getDelegate(); + if (capabilities.getUrl() != null) { + serviceUrls.put(capabilities.getServiceId(), capabilities.getUrl()); } - - if (asset instanceof KlabStatement statement) { - - constraints.add(ResolutionConstraint.of(ResolutionConstraint.Type.ResolutionNamespace, - statement.getNamespace())); - constraints.add(ResolutionConstraint.of(ResolutionConstraint.Type.ResolutionProject, - statement.getProjectName())); - - if (statement instanceof KimModel model) { - resolvables.add(model.getObservables().getFirst()); - constraints.add(ResolutionConstraint.of(ResolutionConstraint.Type.UsingModel, - model.getUrn())); - } else if (statement instanceof KimSymbolDefinition definition) { - if ("observation".equals(definition.getDefineClass())) { - resolvables.add(statement); - } else if ("observer".equals(definition.getDefineClass())) { - resolvables.add(statement); - isObserver = true; - constraints.add(ResolutionConstraint.of(ResolutionConstraint.Type.UseAsObserver)); - } - } else if (statement instanceof KimConceptStatement conceptStatement) { - // TODO check observable vs. context (qualities w/ their context etc.) - resolvables.add(conceptStatement); - } else if (statement instanceof KimObservable conceptStatement) { - // TODO check observable vs. context (qualities w/ their context etc.) 
- resolvables.add(conceptStatement); + if (capabilities.getBrokerURI() != null + && scope() instanceof AbstractReactiveScopeImpl serviceClient) { + /* + * Instrument the service client for messaging. This is pretty involved alas, but the + * whole + * matter isn't exactly trivial. + */ + var client = serviceClient.getService(capabilities.getServiceId()); + if (client != null + && client.serviceScope() instanceof AbstractServiceDelegatingScope delegatingScope + && delegatingScope.getDelegateChannel() + instanceof MessagingChannel messagingChannel) { + /* + * If the scope delegates to a messaging channel, set up messaging and link the + * available service queues to service message dispatchers. + */ + if (!messagingChannel.isConnected()) { + messagingChannel.connectToService( + capabilities, + (UserIdentity) user().getIdentity(), + (message) -> dispatchServerMessage(capabilities, message)); } - } else if (asset instanceof String || asset instanceof Urn) { - resolvables.add(asset.toString()); - } - - /* - TODO add scenario constraints - scenario controller (TBI) should keep them between contexts - */ - - if (resolvables.isEmpty()) { - currentContext.warn("No resolvable assets: observation not started"); - return; - } - - var observation = DigitalTwin.createObservation(currentContext, resolvables.toArray()); - - if (observation == null) { - currentContext.error("Cannot create an observation out of " + asset + ": aborting"); - return; - } - - final boolean observering = isObserver; - - /* one-time event handlers */ - currentContext - .onEvent(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ResolutionSuccessful, (message) -> { - var obs = message.getPayload(Observation.class); - if (observering) { - setCurrentContext(currentContext.withObserver(obs)); - // TODO send a UI event - currentContext.ui(Message.create(currentContext, - Message.MessageClass.UserInterface, - Message.MessageType.CurrentContextModified)); - currentContext.info(obs + " is now the current observer"); - } else if (currentContext.getContextObservation() == null && obs.getObservable().is(SemanticType.SUBJECT)) { - setCurrentContext(currentContext.within(obs)); - currentContext.ui(Message.create(currentContext, - Message.MessageClass.UserInterface, - Message.MessageType.CurrentContextModified)); - currentContext.info(obs + " is now the current context observation"); - } else { - currentContext.info("Observation of " + obs + " completed successfully"); - } - }, observation) - .onEvent(Message.MessageClass.ObservationLifecycle, Message.MessageType.ResolutionAborted, - (message) -> { - currentContext.error("Resolution of observation " + observation + " was aborted" + - " due to errors: " + message.getPayload(Object.class)); - }, observation) - .onEvent(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ResolutionUnsuccessful, - (message) -> { - var obs = message.getPayload(Observation.class); - if (observering) { - setCurrentContext(currentContext.withObserver(obs)); - // send UI event - currentContext.ui(Message.create(currentContext, - Message.MessageClass.UserInterface, - Message.MessageType.CurrentContextModified)); - currentContext.info(obs + " is now the current observer (unresolved)"); - } else if (currentContext.getContextObservation() == null && obs.getObservable().is(SemanticType.SUBJECT)) { - setCurrentContext(currentContext.within(obs)); - currentContext.ui(Message.create(currentContext, - Message.MessageClass.UserInterface, - Message.MessageType.CurrentContextModified)); - 
currentContext.info(obs + " is now the current context observation (unresolved)"); - } else if (obs.getObservable().is(SemanticType.COUNTABLE)) { - currentContext.info("Observation " + observation + " accepted in " + - "unresolved state as an acknowledged substantial"); - } else { - // unresolved dependent: context is now inconsistent - currentContext.error("Dependent observation " + observation + " did not " + - "resolve and was rejected. Context is now inconsistent."); - } - }, observation); - - currentContext - .withResolutionConstraints(constraints.toArray(ResolutionConstraint[]::new)) - .observe(observation); - } - - @Override - public ContextScope openNewContext(String contextName) { - if (currentSession == null) { - return null; + } } - var ret = currentSession.createContext(contextName); - if (ret != null) { - contexts.add(currentSession, ret); + } + } + + super.dispatch(sender, event, payload); + } + + private void dispatchServerMessage( + KlabService.ServiceCapabilities capabilities, Message message) { + // TODO do things + System.out.println( + "SERVER MESSAGE FROM " + + capabilities.getType() + + " " + + capabilities.getServiceId() + + ": " + + message); + } + + @Override + public Engine createEngine() { + // TODO first should locate and set the distribution + return new EngineImpl(); + } + + @Override + protected void createView() { + + /* + pre-built view controllers. View implementations will self-register upon creation. + */ + registerViewController(new ServicesViewControllerImpl(this)); + registerViewController(new DistributionViewImplController(this)); + registerViewController(new ResourcesNavigatorControllerImpl(this)); + registerViewController(new ContextInspectorControllerImpl(this)); + registerViewController(new AuthenticationViewControllerImpl(this)); + registerViewController(new ContextControllerImpl(this)); + registerViewController(new KnowledgeInspectorControllerImpl(this)); + // TODO etc. + + /* + panel classes + */ + registerPanelControllerClass(DocumentEditorControllerImpl.class); + } + + @Override + public void switchWorkbenchService( + UIReactor requestingReactor, KlabService.ServiceCapabilities service) { + // TODO + super.switchWorkbenchService(requestingReactor, service); + } + + @Override + public void switchWorkbench(UIReactor requestingReactor, NavigableContainer container) { + if (getUI() != null) { + // we assume that the workspace is mainly intended to show documents and focus on assets. + // Switching the focal container changes all that, so we first clean everything. + getUI().cleanWorkspace(); + } + super.switchWorkbench(requestingReactor, container); + } + + @Override + public void configureWorkbench( + UIReactor requestingReactor, NavigableDocument document, boolean shown) { + // TODO + super.configureWorkbench(requestingReactor, document, shown); + } + + @Override + public void setOption(Option option, Object... payload) { + // TODO validate option + // TODO react + } + + @Override + public void observe(Object asset, boolean adding) { + + if (currentUser() == null) { + throw new KlabAuthorizationException("Cannot make observations with an invalid user"); + } + + /** + * Use cases: + * + *

Admitted with a current context or focal scale:
+ *
+ * Concept (from ontology, knowledge explorer/inspector or define): promote to observable
+ *     (if countable, becomes collective)
+ * Observable (from define or knowledge inspector): observe as expected
+ * Model (from namespace or search): observe as expected
+ * Resource from catalog (local or remote): observe with a non-semantic observable
+ * Observation from define (can be inline, a URN#ID, other): if adding==true, any existing
+ *     context is preserved and added to; if adding==false, a new context is created and any
+ *     previous one goes out of focus
+ * Observation from context tree: just sets the target for the next observations
+ *
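+ * A minimal, illustrative sketch of the simplest case ({@code modeler} and the URN below are
+ * hypothetical, not part of this API):
+ *
+ * <pre>{@code
+ * // a String (or Urn) asset is resolved by URN; adding == false opens a fresh context
+ * modeler.observe("some.namespace:Elevation", false);
+ * // adding == true keeps the current context and adds the new observation to it
+ * modeler.observe("some.namespace:Elevation", true);
+ * }</pre>
+ *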

All of the above can be either an object or a URN or DOI, from inside or outside. + * + *

Admitted without a current context or focal scale: + * + *

Observation from define (can be inline, a URN#ID, other) + * + *

If there is no session, must create a default session & select it If there is no context, + * must create a default empty context within the session & select it + */ + if (currentSession == null) { + currentSession = openNewSession("S" + (++sessionCount)); + } + + if (currentContext == null && currentSession != null) { + currentContext = openNewContext("C" + (++contextCount)); + } + + if (currentContext == null) { + user().error("cannot create an observation context: aborting", UI.Interactivity.DISPLAY); + return; + } + + List resolvables = new ArrayList<>(); + List constraints = new ArrayList<>(); + boolean isObserver = false; + + /** Assets are observed by URN unless they're models or observation definitions */ + if (asset instanceof NavigableKlabStatement navigableAsset) { + asset = navigableAsset.getDelegate(); + } + + if (asset instanceof KlabStatement statement) { + + constraints.add( + ResolutionConstraint.of( + ResolutionConstraint.Type.ResolutionNamespace, statement.getNamespace())); + constraints.add( + ResolutionConstraint.of( + ResolutionConstraint.Type.ResolutionProject, statement.getProjectName())); + + if (statement instanceof KimModel model) { + resolvables.add(model.getObservables().getFirst()); + constraints.add( + ResolutionConstraint.of(ResolutionConstraint.Type.UsingModel, model.getUrn())); + } else if (statement instanceof KimSymbolDefinition definition) { + if ("observation".equals(definition.getDefineClass())) { + resolvables.add(statement); + } else if ("observer".equals(definition.getDefineClass())) { + resolvables.add(statement); + isObserver = true; + constraints.add(ResolutionConstraint.of(ResolutionConstraint.Type.UseAsObserver)); } - return ret; - } - - @Override - public SessionScope openNewSession(String sessionName) { - var ret = user().createSession(sessionName); - this.sessions.add(ret); - return ret; - } - - @Override - public List getOpenSessions() { - return new ArrayList<>(sessions); - } - - @Override - public List getOpenContexts() { - return new ArrayList<>(contexts.get(currentSession)); - } - - @Override - public ContextScope getCurrentContext() { - return currentContext; - } - - @Override - public SessionScope getCurrentSession() { - return currentSession; - } - - @Override - public void setCurrentContext(ContextScope context) { - if (context != null && (this.currentSession == null || !this.currentSession.equals(context.getParentScope(Scope.Type.SESSION, SessionScope.class)))) { - throw new KlabIllegalArgumentException("Cannot set context: argument is not part of the current" + - " session"); - } - this.currentContext = context; - } - - @Override - public void setCurrentService(KlabService service) { - // TODO - } - - @Override - public void setCurrentSession(SessionScope session) { - this.currentSession = session; - } - - @Override - public void importProject(String workspaceName, String projectUrl, boolean overwriteExisting) { - - var resources = engine().serviceScope().getService(ResourcesService.class); - if (resources instanceof ResourcesService.Admin admin) { - Thread.ofVirtual().start(() -> { + } else if (statement instanceof KimConceptStatement conceptStatement) { + // TODO check observable vs. context (qualities w/ their context etc.) + resolvables.add(conceptStatement); + } else if (statement instanceof KimObservable conceptStatement) { + // TODO check observable vs. context (qualities w/ their context etc.) 
+ resolvables.add(conceptStatement); + } + } else if (asset instanceof String || asset instanceof Urn) { + resolvables.add(asset.toString()); + } + + /* + TODO add scenario constraints - scenario controller (TBI) should keep them between contexts + */ + + if (resolvables.isEmpty()) { + currentContext.warn("No resolvable assets: observation not started"); + return; + } + + var observation = DigitalTwin.createObservation(currentContext, resolvables.toArray()); + + if (observation == null) { + currentContext.error("Cannot create an observation out of " + asset + ": aborting"); + return; + } + + final boolean observering = isObserver; + + /* one-time event handlers */ + currentContext + .onEvent( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionSuccessful, + (message) -> { + var obs = message.getPayload(Observation.class); + if (observering) { + setCurrentContext(currentContext.withObserver(obs)); + currentContext.ui( + Message.create( + currentContext, + Message.MessageClass.UserInterface, + Message.MessageType.CurrentContextModified)); + currentContext.info(obs + " is now the current observer"); + } else if (currentContext.getContextObservation() == null + && obs.getObservable().is(SemanticType.SUBJECT)) { + setCurrentContext(currentContext.within(obs)); + currentContext.ui( + Message.create( + currentContext, + Message.MessageClass.UserInterface, + Message.MessageType.CurrentContextModified)); + currentContext.info(obs + " is now the current context observation"); + } else { + currentContext.info("Observation of " + obs + " completed successfully"); + } + }, + observation) + .onEvent( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionAborted, + (message) -> { + currentContext.error( + "Resolution of observation " + + observation + + " was aborted" + + " due to errors: " + + message.getPayload(Object.class)); + }, + observation) + .onEvent( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionUnsuccessful, + (message) -> { + var obs = message.getPayload(Observation.class); + if (observering) { + setCurrentContext(currentContext.withObserver(obs)); + // send UI event + currentContext.ui( + Message.create( + currentContext, + Message.MessageClass.UserInterface, + Message.MessageType.CurrentContextModified)); + currentContext.info(obs + " is now the current observer (unresolved)"); + } else if (currentContext.getContextObservation() == null + && obs.getObservable().is(SemanticType.SUBJECT)) { + setCurrentContext(currentContext.within(obs)); + currentContext.ui( + Message.create( + currentContext, + Message.MessageClass.UserInterface, + Message.MessageType.CurrentContextModified)); + currentContext.info(obs + " is now the current context observation (unresolved)"); + } else if (obs.getObservable().is(SemanticType.COUNTABLE)) { + currentContext.info( + "Observation " + + observation + + " accepted in unresolved state as an acknowledged substantial"); + } else { + // unresolved dependent: context is now inconsistent + currentContext.error( + "Dependent observation " + + observation + + " did not resolve and was rejected. 
Context is now inconsistent."); + } + }, + observation); + + currentContext + .withResolutionConstraints(constraints.toArray(ResolutionConstraint[]::new)) + .observe(observation); + } + + @Override + public ContextScope openNewContext(String contextName) { + if (currentSession == null) { + return null; + } + var ret = currentSession.createContext(contextName); + if (ret != null) { + contexts.add(currentSession, ret); + } + return ret; + } + + @Override + public SessionScope openNewSession(String sessionName) { + var ret = user().createSession(sessionName); + this.sessions.add(ret); + return ret; + } + + @Override + public List getOpenSessions() { + return new ArrayList<>(sessions); + } + + @Override + public List getOpenContexts() { + return new ArrayList<>(contexts.get(currentSession)); + } + + @Override + public ContextScope getCurrentContext() { + return currentContext; + } + + @Override + public SessionScope getCurrentSession() { + return currentSession; + } + + @Override + public void setCurrentContext(ContextScope context) { + if (context != null + && (this.currentSession == null + || !this.currentSession.equals( + context.getParentScope(Scope.Type.SESSION, SessionScope.class)))) { + throw new KlabIllegalArgumentException( + "Cannot set context: argument is not part of the current" + " session"); + } + this.currentContext = context; + } + + @Override + public void setCurrentService(KlabService service) { + // TODO + } + + @Override + public void setCurrentSession(SessionScope session) { + this.currentSession = session; + } + + @Override + public void importProject(String workspaceName, String projectUrl, boolean overwriteExisting) { + + var resources = engine().serviceScope().getService(ResourcesService.class); + if (resources instanceof ResourcesService.Admin admin) { + Thread.ofVirtual() + .start( + () -> { // TODO use import schema, then resolve project to obtain the ResourceSet throw new KlabUnimplementedException("import project"); -// var ret = admin.importProject(workspaceName, projectUrl, overwriteExisting, currentUser()); -// if (ret != null) { -// handleResultSets(ret); -// } - }); - } else if (getUI() != null) { - getUI().alert(Notification.create("Service does not support this operation", - Notification.Level.Warning)); - } - - } - - @Override - public void deleteProject(String projectUrl) { - - if (getUI() != null) { - if (!getUI().confirm(Notification.create("Confirm unrecoverable deletion of project " + projectUrl + "?"))) { - return; - } - } - - var resources = engine().serviceScope().getService(ResourcesService.class); - if (resources instanceof ResourcesService.Admin admin) { - Thread.ofVirtual().start(() -> { + // var ret = admin.importProject(workspaceName, projectUrl, + // overwriteExisting, currentUser()); + // if (ret != null) { + // handleResultSets(ret); + // } + }); + } else if (getUI() != null) { + getUI() + .alert( + Notification.create( + "Service does not support this operation", Notification.Level.Warning)); + } + } + + @Override + public void deleteProject(String projectUrl) { + + if (getUI() != null) { + if (!getUI() + .confirm( + Notification.create( + "Confirm unrecoverable deletion of project " + projectUrl + "?"))) { + return; + } + } + + var resources = engine().serviceScope().getService(ResourcesService.class); + if (resources instanceof ResourcesService.Admin admin) { + Thread.ofVirtual() + .start( + () -> { var ret = admin.deleteProject(projectUrl, currentUser()); handleResultSets(ret); - }); - } else if (getUI() != null) { - 
getUI().alert(Notification.create("Service does not support this operation", - Notification.Level.Warning)); - } - } - - @Override - public void deleteAsset(NavigableAsset asset) { - - if (getUI() != null) { - if (!getUI().confirm(Notification.create("Confirm unrecoverable deletion of " + asset.getUrn() + "?"))) { - return; - } - } - - var resources = engine().serviceScope().getService(ResourcesService.class); - if (resources instanceof ResourcesService.Admin admin) { - Thread.ofVirtual().start(() -> { + }); + } else if (getUI() != null) { + getUI() + .alert( + Notification.create( + "Service does not support this operation", Notification.Level.Warning)); + } + } + + @Override + public void deleteAsset(NavigableAsset asset) { + + if (getUI() != null) { + if (!getUI() + .confirm( + Notification.create("Confirm unrecoverable deletion of " + asset.getUrn() + "?"))) { + return; + } + } + + var resources = engine().serviceScope().getService(ResourcesService.class); + if (resources instanceof ResourcesService.Admin admin) { + Thread.ofVirtual() + .start( + () -> { var project = asset.parent(NavigableProject.class); var ret = admin.deleteDocument(project.getUrn(), asset.getUrn(), currentUser()); handleResultSets(ret); - }); - } else if (getUI() != null) { - getUI().alert(Notification.create("Service does not support this operation", - Notification.Level.Warning)); - } - - } - - @Override - public void manageProject(String projectId, RepositoryState.Operation operation, String... arguments) { - - var resources = engine().serviceScope().getService(ResourcesService.class); - if (resources instanceof ResourcesService.Admin admin) { - Thread.ofVirtual().start(() -> { + }); + } else if (getUI() != null) { + getUI() + .alert( + Notification.create( + "Service does not support this operation", Notification.Level.Warning)); + } + } + + @Override + public void manageProject( + String projectId, RepositoryState.Operation operation, String... arguments) { + + var resources = engine().serviceScope().getService(ResourcesService.class); + if (resources instanceof ResourcesService.Admin admin) { + Thread.ofVirtual() + .start( + () -> { var ret = admin.manageRepository(projectId, operation, arguments); handleResultSets(ret); - }); - } else if (getUI() != null) { - getUI().alert(Notification.create("Service does not support this operation", - Notification.Level.Warning)); - } - } - - private void handleResultSets(List ret) { - if (ret != null && !ret.isEmpty()) { - for (var change : ret) { - dispatch(this, UIEvent.WorkspaceModified, getUI() == null ? change : - getUI().processAlerts(change)); - } - } - } - - @Override - public void editProperties(String projectId) { - - } - - @Override - public void createDocument(String newDocumentUrn, String projectName, - ProjectStorage.ResourceType documentType) { - var resources = engine().serviceScope().getService(ResourcesService.class); - if (resources instanceof ResourcesService.Admin admin) { - Thread.ofVirtual().start(() -> { - var changes = admin.createDocument(projectName, newDocumentUrn, documentType, currentUser()); + }); + } else if (getUI() != null) { + getUI() + .alert( + Notification.create( + "Service does not support this operation", Notification.Level.Warning)); + } + } + + private void handleResultSets(List ret) { + if (ret != null && !ret.isEmpty()) { + for (var change : ret) { + dispatch( + this, + UIEvent.WorkspaceModified, + getUI() == null ? 
change : getUI().processAlerts(change)); + } + } + } + + @Override + public void editProperties(String projectId) {} + + @Override + public void createDocument( + String newDocumentUrn, String projectName, ProjectStorage.ResourceType documentType) { + var resources = engine().serviceScope().getService(ResourcesService.class); + if (resources instanceof ResourcesService.Admin admin) { + Thread.ofVirtual() + .start( + () -> { + var changes = + admin.createDocument(projectName, newDocumentUrn, documentType, currentUser()); if (changes != null) { - for (var change : changes) { - dispatch(this, UIEvent.WorkspaceModified, getUI() == null ? change : - getUI().processAlerts(change)); - } + for (var change : changes) { + dispatch( + this, + UIEvent.WorkspaceModified, + getUI() == null ? change : getUI().processAlerts(change)); + } } - }); - } else if (getUI() != null) { - getUI().alert(Notification.create("Service does not support this operation", - Notification.Level.Warning)); - } - } - - @Override - public UserScope user() { - return ((EngineImpl) engine()).getUser(); - } - - @Override - public void setDefaultService(KlabService.ServiceCapabilities service) { - if (engine() instanceof EngineImpl engine) { - engine.setDefaultService(service); - } else { - engine().serviceScope().warn("Modeler: request to set default service wasn't honored " + - "because " + "the engine " + "implementation is overridden"); - } - } - - @Override - protected Scope scope() { - return user(); - } - - @Override - public String configurationPath() { - return "modeler"; - } - - public UserScope currentUser() { - return engine() == null || engine().getUsers().isEmpty() ? null : engine().getUsers().getFirst(); - } - - @Override - public URL serviceUrl(String serviceId) { - return serviceUrls.get(serviceId); - } - - @Override - public UIController getController() { - return this; - } + }); + } else if (getUI() != null) { + getUI() + .alert( + Notification.create( + "Service does not support this operation", Notification.Level.Warning)); + } + } + + @Override + public UserScope user() { + return ((EngineImpl) engine()).getUser(); + } + + @Override + public void setDefaultService(KlabService.ServiceCapabilities service) { + if (engine() instanceof EngineImpl engine) { + engine.setDefaultService(service); + } else { + engine() + .serviceScope() + .warn( + "Modeler: request to set default service wasn't honored " + + "because the engine implementation is overridden"); + } + } + + @Override + protected Scope scope() { + return user(); + } + + @Override + public String configurationPath() { + return "modeler"; + } + + public UserScope currentUser() { + return engine() == null || engine().getUsers().isEmpty() + ? 
null + : engine().getUsers().getFirst(); + } + + @Override + public URL serviceUrl(String serviceId) { + return serviceUrls.get(serviceId); + } + + @Override + public UIController getController() { + return this; + } + + @Override + public Scope getCurrentScope() { + if (currentContext != null) { + return currentContext; + } + if (currentSession != null) { + return currentSession; + } + return user(); + } } diff --git a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AdminController.java b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AdminController.java index 58bc8982a..f345d3fd5 100644 --- a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AdminController.java +++ b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AdminController.java @@ -27,35 +27,35 @@ @Secured(Role.ADMINISTRATOR) public class AdminController implements ServicesAPI.REASONER.ADMIN { - @Autowired - private ReasonerServer reasoner; - - @PostMapping(LOAD_KNOWLEDGE) - public @ResponseBody ResourceSet loadKnowledge(@RequestBody Worldview resources, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var userScope = authorization.getScope(UserScope.class); - return reasoner.klabService().loadKnowledge(resources, userScope); - } - return ResourceSet.empty(); + @Autowired private ReasonerServer reasoner; + + @PostMapping(LOAD_KNOWLEDGE) + public @ResponseBody ResourceSet loadKnowledge( + @RequestBody Worldview resources, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var userScope = authorization.getScope(UserScope.class); + return reasoner.klabService().loadKnowledge(resources, userScope); } - - @PostMapping(UPDATE_KNOWLEDGE) - public @ResponseBody ResourceSet loadKnowledge(@RequestBody ResourceSet changes, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var userScope = authorization.getScope(UserScope.class); - return reasoner.klabService().updateKnowledge(changes, userScope); - } - return ResourceSet.empty(); + return ResourceSet.empty(); + } + + @PostMapping(UPDATE_KNOWLEDGE) + public @ResponseBody ResourceSet loadKnowledge( + @RequestBody ResourceSet changes, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var userScope = authorization.getScope(UserScope.class); + return reasoner.klabService().updateKnowledge(changes, userScope); } - - @PostMapping(DEFINE_CONCEPT) - public @ResponseBody Concept defineConcept(@RequestBody KimConceptStatement statement, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var userScope = authorization.getScope(UserScope.class); - return reasoner.klabService().defineConcept(statement, userScope); - } - return null; + return ResourceSet.empty(); + } + + @PostMapping(DEFINE_CONCEPT) + public @ResponseBody Concept defineConcept( + @RequestBody KimConceptStatement statement, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var userScope = authorization.getScope(UserScope.class); + return reasoner.klabService().defineConcept(statement, userScope); } - - + return null; + } } diff --git a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AssistController.java 
b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AssistController.java index 72757654d..6fc0d23f1 100644 --- a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AssistController.java +++ b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/AssistController.java @@ -14,12 +14,12 @@ @RestController public class AssistController { - @Autowired - ReasonerServer reasoner; + @Autowired ReasonerServer reasoner; -// @ApiOperation(value = "Perform guided semantic search, composing a valid logical expression incrementally") - @PostMapping(ServicesAPI.REASONER.SEMANTIC_SEARCH) - SemanticSearchResponse semanticSearch(SemanticSearchRequest request) { - return reasoner.klabService().semanticSearch(request); - } + // @ApiOperation(value = "Perform guided semantic search, composing a valid logical expression + // incrementally") + @PostMapping(ServicesAPI.REASONER.SEMANTIC_SEARCH) + SemanticSearchResponse semanticSearch(SemanticSearchRequest request) { + return reasoner.klabService().semanticSearch(request); + } } diff --git a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/ReasonerController.java b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/ReasonerController.java index 963a782c5..3dc99a703 100644 --- a/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/ReasonerController.java +++ b/klab.services.reasoner.server/src/main/java/org/integratedmodelling/klab/services/reasoner/controllers/ReasonerController.java @@ -24,443 +24,476 @@ @RestController public class ReasonerController { - @Autowired - private ReasonerServer reasoner; - - /** - * GET /resolve/concept - * - * @param definition - * @return - */ - @PostMapping(ServicesAPI.REASONER.RESOLVE_CONCEPT) - public @ResponseBody Concept resolveConcept(@RequestBody String definition) { - return reasoner.klabService().resolveConcept(definition); - } - - @PostMapping(ServicesAPI.REASONER.COMPUTE_OBSERVATION_STRATEGIES) - public @ResponseBody List inferStrategies(@RequestBody ResolutionRequest request, - Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var contextScope = - authorization.getScope(ContextScope.class).withResolutionConstraints( - request.getResolutionConstraints().toArray(new ResolutionConstraint[0])); - return reasoner.klabService().computeObservationStrategies( - request.getObservation(), - contextScope); - } - throw new KlabInternalErrorException("Unexpected implementation of request authorization"); - } - - /** - * /resolve/observable - * - * @param definition - * @return - */ - @PostMapping(ServicesAPI.REASONER.RESOLVE_OBSERVABLE) - public @ResponseBody Observable resolveObservable(@RequestBody String definition) { - return reasoner.klabService().resolveObservable(definition); - } - - @PostMapping(ServicesAPI.REASONER.DECLARE_OBSERVABLE) - public @ResponseBody Observable declareObservable(@RequestBody DeclarationRequest request) { - return request.getObservableDeclaration().getPattern() == null ? 
- reasoner.klabService().declareObservable(request.getObservableDeclaration()) : - reasoner.klabService().declareObservable( - request.getObservableDeclaration(), - request.getPatternVariables()); - } - - @PostMapping(ServicesAPI.REASONER.DECLARE_CONCEPT) - public @ResponseBody Concept declareConcept(@RequestBody DeclarationRequest request) { - return request.getConceptDeclaration().isPattern() ? - reasoner.klabService().declareConcept(request.getConceptDeclaration()) : - reasoner.klabService().declareConcept( - request.getConceptDeclaration(), - request.getPatternVariables()); - } - - /** - * POST /subsumes - * - * @param concepts - * @return - */ - @PostMapping(ServicesAPI.REASONER.SUBSUMES) - public boolean subsumes(@RequestBody Concept[] concepts) { - return reasoner.klabService().is(concepts[0], concepts[1]); - } - - /** - * POST /matches - * - * @param concepts - * @return - */ - @PostMapping(ServicesAPI.REASONER.MATCHES) - public boolean matches(@RequestBody Concept[] concepts) { - return reasoner.klabService().match(concepts[0], concepts[1]); - } - - - /** - * POST /operands - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.OPERANDS) - public @ResponseBody Collection operands(@RequestBody Concept target) { - return reasoner.klabService().operands(target); - } - - /** - * POST /children - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.CHILDREN) - public @ResponseBody Collection children(@RequestBody Concept target) { - return reasoner.klabService().children(target); - } - - /** - * POST /parents - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.PARENTS) - public @ResponseBody Collection parents(@RequestBody Concept target) { - return reasoner.klabService().parents(target); - } - - /** - * POST /parent - * - * @param c - * @return - */ - @PostMapping(ServicesAPI.REASONER.PARENT) - public @ResponseBody Concept parent(@RequestBody Concept c) { - return reasoner.klabService().parent(c); - } - - /** - * POST /allchildren - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.ALL_CHILDREN) - public @ResponseBody Collection allChildren(@RequestBody Concept target) { - return reasoner.klabService().allChildren(target); - } - - /** - * POST /allparents - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.ALL_PARENTS) - public @ResponseBody Collection allParents(@RequestBody Concept target) { - return reasoner.klabService().allParents(target); - } - - /** - * POST /closure - * - * @param target - * @return - */ - @PostMapping(ServicesAPI.REASONER.CLOSURE) - public @ResponseBody Collection closure(@RequestBody Concept target) { - return reasoner.klabService().closure(target); - } - - @PostMapping(ServicesAPI.REASONER.CORE_OBSERVABLE) - public @ResponseBody Concept coreObservable(@RequestBody Concept first) { - return reasoner.klabService().coreObservable(first); - } - - @PostMapping(ServicesAPI.REASONER.SPLIT_OPERATORS) - public @ResponseBody Pair> splitOperators(@RequestBody Concept concept) { - return reasoner.klabService().splitOperators(concept); - } - - // @ApiOperation("Asserted or semantic distance between two concepts. If asserted is false (default) - // the asserted " - // + " distance will be returned as an integer. 
Otherwise, the semantic distance will be - // computed and " - // + "the input data array may contain a third concept to compute the distance in its - // context.") - @PostMapping(ServicesAPI.REASONER.DISTANCE) - public int assertedDistance(@RequestBody Concept[] concepts, - @RequestParam(name = "asserted", - defaultValue = "false") boolean asserted) { - return asserted - ? reasoner.klabService().assertedDistance(concepts[0], concepts[1]) - : reasoner.klabService().semanticDistance(concepts[0], concepts[1], concepts.length == 2 ? - null : concepts[2]); - } - - @PostMapping(ServicesAPI.REASONER.ROLES) - public @ResponseBody Collection roles(@RequestBody Concept concept, - @RequestParam(name = "direct", - defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directRoles(concept) : reasoner.klabService().roles(concept); - } - - @PostMapping(ServicesAPI.REASONER.HAS_ROLE) - public boolean hasRole(@RequestBody Concept[] concept, - @RequestParam(name = "direct", - defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().hasDirectRole(concept[0], concept[1]) : - reasoner.klabService().hasRole(concept[0], concept[1]); - } - - // @PostMapping(ServicesAPI.REASONER.CONTEXT) - // public @ResponseBody Concept directContext(@RequestBody Concept concept, @RequestParam - // (defaultValue = "false") boolean direct) { - // return direct ? reasoner.klabService().directContext(concept) : reasoner.klabService() - // .context(concept); - // } - - @PostMapping(ServicesAPI.REASONER.INHERENT) - public @ResponseBody Concept inherent(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = "false" - ) boolean direct) { - return direct ? reasoner.klabService().directInherent(concept) : - reasoner.klabService().inherent(concept); - } - - @PostMapping(ServicesAPI.REASONER.GOAL) - public @ResponseBody Concept goal(@RequestBody Concept concept, - @RequestParam(name = "direct", - defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directGoal(concept) : reasoner.klabService().goal(concept); - } - - @PostMapping(ServicesAPI.REASONER.COOCCURRENT) - public @ResponseBody Concept cooccurrent(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = - "false") boolean direct) { - return direct ? reasoner.klabService().directCooccurrent(concept) : - reasoner.klabService().cooccurrent(concept); - } - - @PostMapping(ServicesAPI.REASONER.CAUSANT) - public Concept causant(@RequestBody Concept concept, - @RequestParam(name="direct", defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directCausant(concept) : - reasoner.klabService().causant(concept); - } - - @PostMapping(ServicesAPI.REASONER.CAUSED) - public Concept caused(@RequestBody Concept concept, - @RequestParam(name="direct", defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directCaused(concept) : reasoner.klabService().caused(concept); - } - - @PostMapping(ServicesAPI.REASONER.ADJACENT) - public Concept adjacent(@RequestBody Concept concept, - @RequestParam(name="direct", defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directAdjacent(concept) : - reasoner.klabService().adjacent(concept); - } - - @PostMapping(ServicesAPI.REASONER.COMPRESENT) - public Concept compresent(@RequestBody Concept concept, - @RequestParam(name="direct", defaultValue = "false") boolean direct) { - return direct ? 
reasoner.klabService().directCompresent(concept) : - reasoner.klabService().compresent(concept); - } - - @PostMapping(ServicesAPI.REASONER.RELATIVE_TO) - public Concept relativeTo(@RequestBody Concept concept) { - return reasoner.klabService().relativeTo(concept); - } - - @PostMapping(ServicesAPI.REASONER.TRAITS) - public Collection traits(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directTraits(concept) : reasoner.klabService().traits(concept); - } - - @PostMapping(ServicesAPI.REASONER.IDENTITIES) - public Collection identities(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = "false" - ) boolean direct) { - return direct ? reasoner.klabService().directIdentities(concept) : - reasoner.klabService().identities(concept); - } - - @PostMapping(ServicesAPI.REASONER.ATTRIBUTES) - public Collection attributes(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = "false" - ) boolean direct) { - return direct ? reasoner.klabService().directAttributes(concept) : - reasoner.klabService().attributes(concept); - } - - @PostMapping(ServicesAPI.REASONER.REALMS) - public Collection realms(@RequestBody Concept concept, @RequestParam(name = "direct", - defaultValue = "false") boolean direct) { - return direct ? reasoner.klabService().directRealms(concept) : reasoner.klabService().realms(concept); - } - - @PostMapping(ServicesAPI.REASONER.BASE_PARENT_TRAIT) - public Concept baseParentTrait(@RequestBody Concept trait) { - return reasoner.klabService().baseParentTrait(trait); - } - - @PostMapping(ServicesAPI.REASONER.BASE_OBSERVABLE) - public Concept baseObservable(@RequestBody Concept observable) { - return reasoner.klabService().baseObservable(observable); - } - - @PostMapping(ServicesAPI.REASONER.RAW_OBSERVABLE) - public Concept rawObservable(@RequestBody Concept observable) { - return reasoner.klabService().rawObservable(observable); - } - - @PostMapping(ServicesAPI.REASONER.HAS_TRAIT) - public boolean hasTrait(Semantics type, Concept trait, - @RequestParam(name="direct", defaultValue = "false") boolean direct) { - return direct ? 
reasoner.klabService().hasDirectTrait(type, trait) : - reasoner.klabService().hasTrait(type, trait); - } - - @PostMapping(ServicesAPI.REASONER.HAS_PARENT_ROLE) - public boolean hasParentRole(@RequestBody Concept o1, Concept t) { - return reasoner.klabService().hasParentRole(o1, t); - } - - public String displayName(@RequestBody Concept semantics) { - return reasoner.klabService().displayName(semantics); - } - - public String displayLabel(@RequestBody Concept concept) { - return reasoner.klabService().displayLabel(concept); - } - - public String style(@RequestBody Concept concept) { - return reasoner.klabService().style(concept); - } - - @PostMapping(ServicesAPI.REASONER.SEMANTIC_TYPE) - public SemanticType observableType(@RequestBody Concept observable, @RequestParam(name="acceptTraits", defaultValue = "false" - ) boolean acceptTraits) { - return reasoner.klabService().observableType(observable, acceptTraits); - } - - @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_SOURCE) - public Concept relationshipSource(@RequestBody Concept relationship) { - return reasoner.klabService().relationshipSource(relationship); - } - - @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_SOURCES) - public Collection relationshipSources(@RequestBody Concept relationship) { - return reasoner.klabService().relationshipSources(relationship); - } - - @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_TARGET) - public Concept relationshipTarget(@RequestBody Concept relationship) { - return reasoner.klabService().relationshipTarget(relationship); - } - - @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_TARGETS) - public Collection relationshipTargets(@RequestBody Concept relationship) { - return reasoner.klabService().relationshipTargets(relationship); - } - - @PostMapping(ServicesAPI.REASONER.NEGATED) - public Concept negated(@RequestBody Concept concept) { - return reasoner.klabService().negated(concept); - } - - @PostMapping(ServicesAPI.REASONER.SATISFIABLE) - public boolean satisfiable(@RequestBody Concept ret) { - return reasoner.klabService().satisfiable(ret); - } - - @PostMapping(ServicesAPI.REASONER.DOMAIN) - public Semantics domain(@RequestBody Concept conceptImpl) { - return reasoner.klabService().domain(conceptImpl); - } - - @PostMapping(ServicesAPI.REASONER.APPLICABLE) - public Collection applicableObservables(@RequestBody Concept main) { - return reasoner.klabService().applicableObservables(main); - } - - @PostMapping(ServicesAPI.REASONER.DESCRIBED) - public Concept describedType(@RequestBody Concept concept) { - return reasoner.klabService().describedType(concept); - } - - @PostMapping(ServicesAPI.REASONER.COMPATIBLE) - public boolean compatible(@RequestBody Concept[] args) { - return reasoner.klabService().compatible(args[0], args[1]); - } - - @PostMapping(ServicesAPI.REASONER.CONTEXTUALLY_COMPATIBLE) - public boolean contextuallyCompatible(@RequestBody Concept[] args) { - return reasoner.klabService().contextuallyCompatible(args[0], args[1], args[2]); - } - - @PostMapping(ServicesAPI.REASONER.OCCURRENT) - public boolean occurrent(@RequestBody Concept concept) { - return reasoner.klabService().occurrent(concept); - } - - @PostMapping(ServicesAPI.REASONER.LGC) - public Concept leastGeneralCommon(@RequestBody Collection cc) { - return reasoner.klabService().leastGeneralCommon(cc); - } - - @PostMapping(ServicesAPI.REASONER.AFFECTED_BY) - public boolean affectedBy(@RequestBody Concept[] args) { - return reasoner.klabService().affectedBy(args[0], args[1]); - } - - @PostMapping(ServicesAPI.REASONER.CREATED_BY) - public 
boolean createdBy(@RequestBody Concept[] args) { - return reasoner.klabService().createdBy(args[0], args[1]); - } - - @PostMapping(ServicesAPI.REASONER.AFFECTED_OR_CREATED) - public Collection affectedOrCreated(@RequestBody Concept semantics) { - return reasoner.klabService().affectedOrCreated(semantics); - } - - @PostMapping(ServicesAPI.REASONER.AFFECTED) - public Collection affected(@RequestBody Concept semantics) { - return reasoner.klabService().affected(semantics); - } - - @PostMapping(ServicesAPI.REASONER.CREATED) - public Collection created(@RequestBody Concept semantics) { - return reasoner.klabService().created(semantics); - } - - @PostMapping(ServicesAPI.REASONER.ROLES_FOR) - public Collection rolesFor(@RequestBody Concept[] args) { - return reasoner.klabService().rolesFor(args[0], args.length == 1 ? null : args[1]); - } - - @PostMapping(ServicesAPI.REASONER.IMPLIED_ROLE) - public Concept impliedRole(@RequestBody Concept[] args) { - return reasoner.klabService().impliedRole(args[0], args.length == 1 ? null : args[1]); - } - - @PostMapping(ServicesAPI.REASONER.IMPLIED_ROLES) - public @ResponseBody Collection impliedRoles(@RequestBody Concept role, - @RequestParam(name="includeRelationshipEndpoints", defaultValue = "false") boolean includeRelationshipEndpoints) { - return reasoner.klabService().impliedRoles(role, includeRelationshipEndpoints); - } - + @Autowired private ReasonerServer reasoner; + + /** + * GET /resolve/concept + * + * @param definition + * @return + */ + @PostMapping(ServicesAPI.REASONER.RESOLVE_CONCEPT) + public @ResponseBody Concept resolveConcept(@RequestBody String definition) { + return reasoner.klabService().resolveConcept(definition); + } + + @PostMapping(ServicesAPI.REASONER.COMPUTE_OBSERVATION_STRATEGIES) + public @ResponseBody List inferStrategies( + @RequestBody ResolutionRequest request, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var contextScope = + authorization + .getScope(ContextScope.class) + .withResolutionConstraints( + request.getResolutionConstraints().toArray(new ResolutionConstraint[0])); + return reasoner + .klabService() + .computeObservationStrategies(request.getObservation(), contextScope); + } + throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + } + + /** + * /resolve/observable + * + * @param definition + * @return + */ + @PostMapping(ServicesAPI.REASONER.RESOLVE_OBSERVABLE) + public @ResponseBody Observable resolveObservable(@RequestBody String definition) { + return reasoner.klabService().resolveObservable(definition); + } + + @PostMapping(ServicesAPI.REASONER.DECLARE_OBSERVABLE) + public @ResponseBody Observable declareObservable(@RequestBody DeclarationRequest request) { + return request.getObservableDeclaration().getPattern() == null + ? reasoner.klabService().declareObservable(request.getObservableDeclaration()) + : reasoner + .klabService() + .declareObservable(request.getObservableDeclaration(), request.getPatternVariables()); + } + + @PostMapping(ServicesAPI.REASONER.DECLARE_CONCEPT) + public @ResponseBody Concept declareConcept(@RequestBody DeclarationRequest request) { + return request.getConceptDeclaration().isPattern() + ? 
reasoner.klabService().declareConcept(request.getConceptDeclaration()) + : reasoner + .klabService() + .declareConcept(request.getConceptDeclaration(), request.getPatternVariables()); + } + + /** + * POST /subsumes + * + * @param concepts + * @return + */ + @PostMapping(ServicesAPI.REASONER.SUBSUMES) + public boolean subsumes(@RequestBody Concept[] concepts) { + return reasoner.klabService().is(concepts[0], concepts[1]); + } + + /** + * POST /matches + * + * @param concepts + * @return + */ + @PostMapping(ServicesAPI.REASONER.MATCHES) + public boolean matches(@RequestBody Concept[] concepts) { + return reasoner.klabService().match(concepts[0], concepts[1]); + } + + /** + * POST /operands + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.OPERANDS) + public @ResponseBody Collection operands(@RequestBody Concept target) { + return reasoner.klabService().operands(target); + } + + /** + * POST /children + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.CHILDREN) + public @ResponseBody Collection children(@RequestBody Concept target) { + return reasoner.klabService().children(target); + } + + /** + * POST /parents + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.PARENTS) + public @ResponseBody Collection parents(@RequestBody Concept target) { + return reasoner.klabService().parents(target); + } + + /** + * POST /parent + * + * @param c + * @return + */ + @PostMapping(ServicesAPI.REASONER.PARENT) + public @ResponseBody Concept parent(@RequestBody Concept c) { + return reasoner.klabService().parent(c); + } + + /** + * POST /allchildren + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.ALL_CHILDREN) + public @ResponseBody Collection allChildren(@RequestBody Concept target) { + return reasoner.klabService().allChildren(target); + } + + /** + * POST /allparents + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.ALL_PARENTS) + public @ResponseBody Collection allParents(@RequestBody Concept target) { + return reasoner.klabService().allParents(target); + } + + /** + * POST /closure + * + * @param target + * @return + */ + @PostMapping(ServicesAPI.REASONER.CLOSURE) + public @ResponseBody Collection closure(@RequestBody Concept target) { + return reasoner.klabService().closure(target); + } + + @PostMapping(ServicesAPI.REASONER.CORE_OBSERVABLE) + public @ResponseBody Concept coreObservable(@RequestBody Concept first) { + return reasoner.klabService().coreObservable(first); + } + + @PostMapping(ServicesAPI.REASONER.SPLIT_OPERATORS) + public @ResponseBody Pair> splitOperators( + @RequestBody Concept concept) { + return reasoner.klabService().splitOperators(concept); + } + + // @ApiOperation("Asserted or semantic distance between two concepts. If asserted is false + // (default) + // the asserted " + // + " distance will be returned as an integer. Otherwise, the semantic distance will + // be + // computed and " + // + "the input data array may contain a third concept to compute the distance in its + // context.") + @PostMapping(ServicesAPI.REASONER.DISTANCE) + public int assertedDistance( + @RequestBody Concept[] concepts, + @RequestParam(name = "asserted", defaultValue = "false") boolean asserted) { + return asserted + ? reasoner.klabService().assertedDistance(concepts[0], concepts[1]) + : reasoner + .klabService() + .semanticDistance(concepts[0], concepts[1], concepts.length == 2 ? 
null : concepts[2]); + } + + @PostMapping(ServicesAPI.REASONER.ROLES) + public @ResponseBody Collection roles( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directRoles(concept) + : reasoner.klabService().roles(concept); + } + + @PostMapping(ServicesAPI.REASONER.HAS_ROLE) + public boolean hasRole( + @RequestBody Concept[] concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().hasDirectRole(concept[0], concept[1]) + : reasoner.klabService().hasRole(concept[0], concept[1]); + } + + // @PostMapping(ServicesAPI.REASONER.CONTEXT) + // public @ResponseBody Concept directContext(@RequestBody Concept concept, @RequestParam + // (defaultValue = "false") boolean direct) { + // return direct ? reasoner.klabService().directContext(concept) : reasoner.klabService() + // .context(concept); + // } + + @PostMapping(ServicesAPI.REASONER.INHERENT) + public @ResponseBody Concept inherent( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directInherent(concept) + : reasoner.klabService().inherent(concept); + } + + @PostMapping(ServicesAPI.REASONER.GOAL) + public @ResponseBody Concept goal( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directGoal(concept) + : reasoner.klabService().goal(concept); + } + + @PostMapping(ServicesAPI.REASONER.COOCCURRENT) + public @ResponseBody Concept cooccurrent( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directCooccurrent(concept) + : reasoner.klabService().cooccurrent(concept); + } + + @PostMapping(ServicesAPI.REASONER.CAUSANT) + public Concept causant( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directCausant(concept) + : reasoner.klabService().causant(concept); + } + + @PostMapping(ServicesAPI.REASONER.CAUSED) + public Concept caused( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directCaused(concept) + : reasoner.klabService().caused(concept); + } + + @PostMapping(ServicesAPI.REASONER.ADJACENT) + public Concept adjacent( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directAdjacent(concept) + : reasoner.klabService().adjacent(concept); + } + + @PostMapping(ServicesAPI.REASONER.COMPRESENT) + public Concept compresent( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directCompresent(concept) + : reasoner.klabService().compresent(concept); + } + + @PostMapping(ServicesAPI.REASONER.RELATIVE_TO) + public Concept relativeTo(@RequestBody Concept concept) { + return reasoner.klabService().relativeTo(concept); + } + + @PostMapping(ServicesAPI.REASONER.TRAITS) + public Collection traits( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? 
reasoner.klabService().directTraits(concept) + : reasoner.klabService().traits(concept); + } + + @PostMapping(ServicesAPI.REASONER.IDENTITIES) + public Collection identities( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directIdentities(concept) + : reasoner.klabService().identities(concept); + } + + @PostMapping(ServicesAPI.REASONER.ATTRIBUTES) + public Collection attributes( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directAttributes(concept) + : reasoner.klabService().attributes(concept); + } + + @PostMapping(ServicesAPI.REASONER.REALMS) + public Collection realms( + @RequestBody Concept concept, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().directRealms(concept) + : reasoner.klabService().realms(concept); + } + + @PostMapping(ServicesAPI.REASONER.BASE_PARENT_TRAIT) + public Concept baseParentTrait(@RequestBody Concept trait) { + return reasoner.klabService().baseParentTrait(trait); + } + + @PostMapping(ServicesAPI.REASONER.BASE_OBSERVABLE) + public Concept baseObservable(@RequestBody Concept observable) { + return reasoner.klabService().baseObservable(observable); + } + + @PostMapping(ServicesAPI.REASONER.RAW_OBSERVABLE) + public Concept rawObservable(@RequestBody Concept observable) { + return reasoner.klabService().rawObservable(observable); + } + + @PostMapping(ServicesAPI.REASONER.HAS_TRAIT) + public boolean hasTrait( + Semantics type, + Concept trait, + @RequestParam(name = "direct", defaultValue = "false") boolean direct) { + return direct + ? reasoner.klabService().hasDirectTrait(type, trait) + : reasoner.klabService().hasTrait(type, trait); + } + + @PostMapping(ServicesAPI.REASONER.HAS_PARENT_ROLE) + public boolean hasParentRole(@RequestBody Concept o1, Concept t) { + return reasoner.klabService().hasParentRole(o1, t); + } + + public String displayName(@RequestBody Concept semantics) { + return reasoner.klabService().displayName(semantics); + } + + public String displayLabel(@RequestBody Concept concept) { + return reasoner.klabService().displayLabel(concept); + } + + public String style(@RequestBody Concept concept) { + return reasoner.klabService().style(concept); + } + + @PostMapping(ServicesAPI.REASONER.SEMANTIC_TYPE) + public SemanticType observableType( + @RequestBody Concept observable, + @RequestParam(name = "acceptTraits", defaultValue = "false") boolean acceptTraits) { + return reasoner.klabService().observableType(observable, acceptTraits); + } + + @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_SOURCE) + public Concept relationshipSource(@RequestBody Concept relationship) { + return reasoner.klabService().relationshipSource(relationship); + } + + @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_SOURCES) + public Collection relationshipSources(@RequestBody Concept relationship) { + return reasoner.klabService().relationshipSources(relationship); + } + + @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_TARGET) + public Concept relationshipTarget(@RequestBody Concept relationship) { + return reasoner.klabService().relationshipTarget(relationship); + } + + @PostMapping(ServicesAPI.REASONER.RELATIONSHIP_TARGETS) + public Collection relationshipTargets(@RequestBody Concept relationship) { + return reasoner.klabService().relationshipTargets(relationship); + } + + 
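All reasoner endpoints above share one request shape: the concept (or concept array) travels in the POST body, while an optional boolean query parameter such as "direct" selects the asserted rather than the inferred answer. A minimal client-side sketch of that convention, not part of this changeset, assuming Spring's RestClient, a reasoner reachable at a hypothetical http://localhost:8091, and message converters able to (de)serialize Concept (k.LAB API imports omitted):

import org.springframework.web.client.RestClient;

class ReasonerClientSketch {
  // Hypothetical helper: fetch the directly asserted traits of a concept from the reasoner.
  Concept[] directTraitsOf(Concept concept) {
    RestClient client = RestClient.create("http://localhost:8091"); // assumed base URL
    return client
        .post()
        .uri(uri -> uri.path(ServicesAPI.REASONER.TRAITS).queryParam("direct", true).build())
        .body(concept)
        .retrieve()
        .body(Concept[].class);
  }
}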
@PostMapping(ServicesAPI.REASONER.NEGATED) + public Concept negated(@RequestBody Concept concept) { + return reasoner.klabService().negated(concept); + } + + @PostMapping(ServicesAPI.REASONER.SATISFIABLE) + public boolean satisfiable(@RequestBody Concept ret) { + return reasoner.klabService().satisfiable(ret); + } + + @PostMapping(ServicesAPI.REASONER.DOMAIN) + public Semantics domain(@RequestBody Concept conceptImpl) { + return reasoner.klabService().domain(conceptImpl); + } + + @PostMapping(ServicesAPI.REASONER.APPLICABLE) + public Collection applicableObservables(@RequestBody Concept main) { + return reasoner.klabService().applicableObservables(main); + } + + @PostMapping(ServicesAPI.REASONER.DESCRIBED) + public Concept describedType(@RequestBody Concept concept) { + return reasoner.klabService().describedType(concept); + } + + @PostMapping(ServicesAPI.REASONER.COMPATIBLE) + public boolean compatible(@RequestBody Concept[] args) { + return reasoner.klabService().compatible(args[0], args[1]); + } + + @PostMapping(ServicesAPI.REASONER.CONTEXTUALLY_COMPATIBLE) + public boolean contextuallyCompatible(@RequestBody Concept[] args) { + return reasoner.klabService().contextuallyCompatible(args[0], args[1], args[2]); + } + + @PostMapping(ServicesAPI.REASONER.OCCURRENT) + public boolean occurrent(@RequestBody Concept concept) { + return reasoner.klabService().occurrent(concept); + } + + @PostMapping(ServicesAPI.REASONER.LGC) + public Concept leastGeneralCommon(@RequestBody Collection cc) { + return reasoner.klabService().leastGeneralCommon(cc); + } + + @PostMapping(ServicesAPI.REASONER.AFFECTED_BY) + public boolean affectedBy(@RequestBody Concept[] args) { + return reasoner.klabService().affectedBy(args[0], args[1]); + } + + @PostMapping(ServicesAPI.REASONER.CREATED_BY) + public boolean createdBy(@RequestBody Concept[] args) { + return reasoner.klabService().createdBy(args[0], args[1]); + } + + @PostMapping(ServicesAPI.REASONER.AFFECTED_OR_CREATED) + public Collection affectedOrCreated(@RequestBody Concept semantics) { + return reasoner.klabService().affectedOrCreated(semantics); + } + + @PostMapping(ServicesAPI.REASONER.AFFECTED) + public Collection affected(@RequestBody Concept semantics) { + return reasoner.klabService().affected(semantics); + } + + @PostMapping(ServicesAPI.REASONER.CREATED) + public Collection created(@RequestBody Concept semantics) { + return reasoner.klabService().created(semantics); + } + + @PostMapping(ServicesAPI.REASONER.ROLES_FOR) + public Collection rolesFor(@RequestBody Concept[] args) { + return reasoner.klabService().rolesFor(args[0], args.length == 1 ? null : args[1]); + } + + @PostMapping(ServicesAPI.REASONER.IMPLIED_ROLE) + public Concept impliedRole(@RequestBody Concept[] args) { + return reasoner.klabService().impliedRole(args[0], args.length == 1 ? 
null : args[1]); + } + + @PostMapping(ServicesAPI.REASONER.IMPLIED_ROLES) + public @ResponseBody Collection impliedRoles( + @RequestBody Concept role, + @RequestParam(name = "includeRelationshipEndpoints", defaultValue = "false") + boolean includeRelationshipEndpoints) { + return reasoner.klabService().impliedRoles(role, includeRelationshipEndpoints); + } } diff --git a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ObservationReasoner.java b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ObservationReasoner.java index 8ea5b634a..aeefd80c4 100644 --- a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ObservationReasoner.java +++ b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ObservationReasoner.java @@ -20,636 +20,683 @@ import java.util.*; /** - * Specialized functions to infer observation strategies. Kept separately for clarity as this is a crucial - * k.LAB component, although they are part of the reasoner services. + * Specialized functions to infer observation strategies. Kept separately for clarity as this is a + * crucial k.LAB component, although they are part of the reasoner services. */ public class ObservationReasoner { - private static Set defaultVariables = Set.of("this", "context"); - - private Reasoner reasoner; - private List observationStrategies = new ArrayList<>(); - - private static class QuickSemanticFilter { - - public Set semanticTypesWhitelist = EnumSet.noneOf(SemanticType.class); - public Set semanticTypesBlacklist = EnumSet.noneOf(SemanticType.class); - // any predefined variables used in patterns - public Set fixedVariablesUsed = new HashSet<>(); - public Set customVariablesUsed = new HashSet<>(); - public boolean collectiveConstraints; - public boolean collectiveOnly; - public boolean nonCollectiveOnly; - - /** - * Quick match to quickly weed out the non-matching classes and minimize the need for inference and - * pattern instantiation. - * - * @param observable - * @param scope - * @return - */ - public boolean match(Observable observable, ContextScope scope) { - if (!semanticTypesWhitelist.isEmpty()) { - if (Sets.intersection(observable.getSemantics().getType(), semanticTypesWhitelist).isEmpty()) { - return false; - } - } - if (!semanticTypesBlacklist.isEmpty()) { - if (!Sets.intersection(observable.getSemantics().getType(), semanticTypesBlacklist).isEmpty()) { - return false; - } - } - if (collectiveConstraints) { - if ((collectiveOnly && !observable.isCollective()) || (nonCollectiveOnly && observable.isCollective())) { - return false; - } - } - return true; - } + private static Set defaultVariables = Set.of("this", "context"); - } + private Reasoner reasoner; + private List observationStrategies = new ArrayList<>(); - /** - * We precompute the non-contextual applicable info for each strategy to quickly weed out those that are - * certain to not apply. - */ - private Map quickFilters = new HashMap<>(); + private static class QuickSemanticFilter { - public ObservationReasoner(ReasonerService reasonerService) { - this.reasoner = reasonerService; - // ensure the core functor library is read. Plugins may add more. 
- reasonerService.getComponentRegistry().loadExtensions("org.integratedmodelling.klab.services" + - ".reasoner.functors"); - } + public Set semanticTypesWhitelist = EnumSet.noneOf(SemanticType.class); + public Set semanticTypesBlacklist = EnumSet.noneOf(SemanticType.class); + // any predefined variables used in patterns + public Set fixedVariablesUsed = new HashSet<>(); + public Set customVariablesUsed = new HashSet<>(); + public boolean collectiveConstraints; + public boolean collectiveOnly; + public boolean nonCollectiveOnly; /** - * Compile and return a list of matching, contextualized observation strategies that match the observable - * and scope, in order of rank and cost, for the resolver to resolve. + * Quick match to quickly weed out the non-matching classes and minimize the need for inference + * and pattern instantiation. * - * @param observation + * @param observable * @param scope * @return */ - public List computeMatchingStrategies(Observation observation, ContextScope scope) { - - var observable = observation.getObservable(); - List ret = new ArrayList<>(); - - for (var strategy : observationStrategies) { - - - QuickSemanticFilter filter = quickFilters.get(strategy.getUrn()); - - if (filter.fixedVariablesUsed.contains("context") && scope.getContextObservation() == null) { - continue; - } - - if (filter.match(observable, scope)) { - - Map patternVariableValues = new HashMap<>(); - for (var variable : filter.fixedVariablesUsed) { - patternVariableValues.put(variable, switch (variable) { - case "this" -> observable; - case "context" -> scope.getContextObservation().getObservable(); - default -> - throw new KlabUnimplementedException("predefined pattern variable " + variable); - }); - } + public boolean match(Observable observable, ContextScope scope) { + if (!semanticTypesWhitelist.isEmpty()) { + if (Sets.intersection(observable.getSemantics().getType(), semanticTypesWhitelist) + .isEmpty()) { + return false; + } + } + if (!semanticTypesBlacklist.isEmpty()) { + if (!Sets.intersection(observable.getSemantics().getType(), semanticTypesBlacklist) + .isEmpty()) { + return false; + } + } + if (collectiveConstraints) { + if ((collectiveOnly && !observable.isCollective()) + || (nonCollectiveOnly && observable.isCollective())) { + return false; + } + } + return true; + } + } + + /** + * We precompute the non-contextual applicable info for each strategy to quickly weed out those + * that are certain to not apply. + */ + private Map quickFilters = new HashMap<>(); + + public ObservationReasoner(ReasonerService reasonerService) { + this.reasoner = reasonerService; + // ensure the core functor library is read. Plugins may add more. + reasonerService + .getComponentRegistry() + .loadExtensions("org.integratedmodelling.klab.services" + ".reasoner.functors"); + } + + /** + * Compile and return a list of matching, contextualized observation strategies that match the + * observable and scope, in order of rank and cost, for the resolver to resolve. 
+ * + * @param observation + * @param scope + * @return + */ + public List computeMatchingStrategies( + Observation observation, ContextScope scope) { + + var observable = observation.getObservable(); + List ret = new ArrayList<>(); + + for (var strategy : observationStrategies) { + + QuickSemanticFilter filter = quickFilters.get(strategy.getUrn()); + + if (filter.fixedVariablesUsed.contains("context") && scope.getContextObservation() == null) { + continue; + } + + if (filter.match(observable, scope)) { + + Map patternVariableValues = new HashMap<>(); + for (var variable : filter.fixedVariablesUsed) { + patternVariableValues.put( + variable, + switch (variable) { + case "this" -> observable; + case "context" -> scope.getContextObservation().getObservable(); + default -> + throw new KlabUnimplementedException("predefined pattern variable " + variable); + }); + } - for (var variable : strategy.getMacroVariables().keySet()) { - var functor = strategy.getMacroVariables().get(variable); - if (functor.getLiteral() != null) { - patternVariableValues.put(variable, Utils.Data.asString(functor.getLiteral())); - } else if (functor.getMatch() != null) { - // can't happen for now, parser won't accept. Should be a pattern to be useful. - } else if (!functor.getFunctions().isEmpty()) { - for (var function : functor.getFunctions()) { - var value = matchFunction(function, observable, scope, Object.class, - patternVariableValues); - String[] varNames = variable.split(","); - if (value instanceof Collection collection) { - // must be string with same amount of return values - if (varNames.length != collection.size()) { - scope.error("wrong number of return values from " + function); - } - int i = 0; - for (var o : collection) { - patternVariableValues.put(varNames[i++], o); - } - } else { - // set pattern var - if (varNames.length != 1) { - scope.error("wrong number of return values from " + function); - } - patternVariableValues.put(variable, value); - } - } - } + for (var variable : strategy.getMacroVariables().keySet()) { + var functor = strategy.getMacroVariables().get(variable); + if (functor.getLiteral() != null) { + patternVariableValues.put(variable, Utils.Data.asString(functor.getLiteral())); + } else if (functor.getMatch() != null) { + // can't happen for now, parser won't accept. Should be a pattern to be useful. 
+ } else if (!functor.getFunctions().isEmpty()) { + for (var function : functor.getFunctions()) { + var value = + matchFunction(function, observable, scope, Object.class, patternVariableValues); + String[] varNames = variable.split(","); + if (value instanceof Collection collection) { + // must be string with same amount of return values + if (varNames.length != collection.size()) { + scope.error("wrong number of return values from " + function); } - - // at least a matching filter is necessary - boolean match = false; - for (var filterList : strategy.getFilters()) { - for (var matching : filterList) { - if (matchFilter(matching, observation, scope, patternVariableValues)) { - match = true; - break; - } - } - if (match) { - break; - } + int i = 0; + for (var o : collection) { + patternVariableValues.put(varNames[i++], o); } - - if (!match) { - continue; + } else { + // set pattern var + if (varNames.length != 1) { + scope.error("wrong number of return values from " + function); } + patternVariableValues.put(variable, value); + } + } + } + } - /* - if we get here, the strategy definition is a match: compile the observation strategy - operations for the observable and scope - */ - - var os = new ObservationStrategyImpl(); - - os.setDocumentation(strategy.getDescription()); // TODO compile template - os.setUrn(strategy.getUrn()); - - for (var operation : strategy.getOperations()) { - - var op = new ObservationStrategyImpl.OperationImpl(); - op.setType(operation.getType()); - - if (operation.getObservable() != null) { - op.setObservable(operation.getObservable().getPatternVariables().isEmpty() ? - reasoner.declareObservable(operation.getObservable()) : - reasoner.declareObservable(operation.getObservable(), - patternVariableValues)); - } - for (var function : operation.getFunctions()) { - op.getContextualizables().add(new ContextualizableImpl(function)); - } - os.getOperations().add(op); - } - ret.add(os); + // at least a matching filter is necessary + boolean match = false; + for (var filterList : strategy.getFilters()) { + for (var matching : filterList) { + if (matchFilter(matching, observation, scope, patternVariableValues)) { + match = true; + break; } + } + if (match) { + break; + } } - return ret; - } + if (!match) { + continue; + } - private Object matchFunction(ServiceCall function, Semantics observable, ContextScope scope, - Class objectClass, Map patternVariableValues) { + /* + if we get here, the strategy definition is a match: compile the observation strategy + operations for the observable and scope + */ - var languageService = ServiceConfiguration.INSTANCE.getService(Language.class); + var os = new ObservationStrategyImpl(); - // complete arguments if empty or using previously instantiated variables - if (function.getParameters().isEmpty()) { - function = function.withUnnamedParameters(observable); - } else for (var key : function.getParameters().keySet()) { - // substitute parameters and set them as unnamed - function = function.withUnnamedParameters(patternVariableValues.getOrDefault(key.substring(1), - key)); - } - return languageService.execute(function, scope, Object.class); + os.setDocumentation(strategy.getDescription()); // TODO compile template + os.setUrn(strategy.getUrn()); - } + for (var operation : strategy.getOperations()) { - private boolean matchFilter(KimObservationStrategy.Filter filter, Observation observation, - ContextScope scope, Map patternVariableValues) { + var op = new ObservationStrategyImpl.OperationImpl(); + op.setType(operation.getType()); - boolean 
ret = true; - if (filter.getMatch() != null) { - var semantics = filter.getMatch().isPattern() ? reasoner.declareConcept(filter.getMatch(), - patternVariableValues) : reasoner.declareConcept(filter.getMatch()); - ret = semantics != null && reasoner.match(observation.getObservable(), semantics); - } - if (ret && !filter.getFunctions().isEmpty()) { - for (var function : filter.getFunctions()) { - var value = matchFunction(function, observation.getObservable(), scope, Object.class, - patternVariableValues); - ret = value instanceof Boolean bool && bool; - } + if (operation.getObservable() != null) { + op.setObservable( + operation.getObservable().getPatternVariables().isEmpty() + ? reasoner.declareObservable(operation.getObservable()) + : reasoner.declareObservable(operation.getObservable(), patternVariableValues)); + } + for (var function : operation.getFunctions()) { + op.getContextualizables().add(new ContextualizableImpl(function)); + } + os.getOperations().add(op); } - return filter.isNegated() != ret; + ret.add(os); + } } - /** - * An integer from 0 to 100, used to rank strategies in context among groups of strategies with - * the same rank. Only called on strategies that match the observable. - * - * @return - */ - public int getCost(ObservationStrategy strategy, Observable observable, ContextScope scope) { - return 0; + return ret; + } + + private Object matchFunction( + ServiceCall function, + Semantics observable, + ContextScope scope, + Class objectClass, + Map patternVariableValues) { + + var languageService = ServiceConfiguration.INSTANCE.getService(Language.class); + + // complete arguments if empty or using previously instantiated variables + if (function.getParameters().isEmpty()) { + function = function.withUnnamedParameters(observable); + } else + for (var key : function.getParameters().keySet()) { + // substitute parameters and set them as unnamed + function = + function.withUnnamedParameters( + patternVariableValues.getOrDefault(key.substring(1), key)); + } + return languageService.execute(function, scope, Object.class); + } + + private boolean matchFilter( + KimObservationStrategy.Filter filter, + Observation observation, + ContextScope scope, + Map patternVariableValues) { + + boolean ret = true; + if (filter.getMatch() != null) { + var semantics = + filter.getMatch().isPattern() + ? reasoner.declareConcept(filter.getMatch(), patternVariableValues) + : reasoner.declareConcept(filter.getMatch()); + ret = semantics != null && reasoner.match(observation.getObservable(), semantics); } - - /** - * Release the named namespace, i.e. remove all strategies it contains. - * - * @param strategyNamespace - */ - public void releaseNamespace(String strategyNamespace) { - var filtered = - observationStrategies.stream().filter(o -> !o.getNamespace().equals(strategyNamespace)).toList(); - observationStrategies.clear(); - observationStrategies.addAll(filtered); + if (ret && !filter.getFunctions().isEmpty()) { + for (var function : filter.getFunctions()) { + var value = + matchFunction( + function, observation.getObservable(), scope, Object.class, patternVariableValues); + ret = value instanceof Boolean bool && bool; + } } - - /** - * Add a new strategy or substitute the existing version of the same. 
- */ - public void registerStrategy(KimObservationStrategy observationStrategy) { - observationStrategies.add(observationStrategy); - quickFilters.put(observationStrategy.getUrn(), computeInfo(observationStrategy)); - } - - public void initializeStrategies() { - observationStrategies.sort(new Comparator() { - @Override - public int compare(KimObservationStrategy o1, KimObservationStrategy o2) { - return Integer.compare(o1.getRank(), o2.getRank()); - } + return filter.isNegated() != ret; + } + + /** + * An integer from 0 to 100, used to rank strategies in context among groups of + * strategies with the same rank. Only called on strategies that match the observable. + * + * @return + */ + public int getCost(ObservationStrategy strategy, Observable observable, ContextScope scope) { + return 0; + } + + /** + * Release the named namespace, i.e. remove all strategies it contains. + * + * @param strategyNamespace + */ + public void releaseNamespace(String strategyNamespace) { + var filtered = + observationStrategies.stream() + .filter(o -> !o.getNamespace().equals(strategyNamespace)) + .toList(); + observationStrategies.clear(); + observationStrategies.addAll(filtered); + } + + /** Add a new strategy or substitute the existing version of the same. */ + public void registerStrategy(KimObservationStrategy observationStrategy) { + observationStrategies.add(observationStrategy); + quickFilters.put(observationStrategy.getUrn(), computeInfo(observationStrategy)); + } + + public void initializeStrategies() { + observationStrategies.sort( + new Comparator() { + @Override + public int compare(KimObservationStrategy o1, KimObservationStrategy o2) { + return Integer.compare(o1.getRank(), o2.getRank()); + } }); - } - - private QuickSemanticFilter computeInfo(KimObservationStrategy observationStrategy) { - - Set variables = new HashSet<>(); - QuickSemanticFilter ret = new QuickSemanticFilter(); - - int nCollective = 0; - int nNoncollective = 0; - - for (var filter : observationStrategy.getFilters()) { - for (var match : filter) { - // TODO negation is much more complicated - if (match.getMatch() != null) { - if (match.isNegated()) { - ret.semanticTypesBlacklist.add(SemanticType.fundamentalType(match.getMatch().getType())); - } else { - ret.semanticTypesWhitelist.add(SemanticType.fundamentalType(match.getMatch().getType())); - } - if (match.getMatch().isCollective()) { - nCollective++; - } else { - nNoncollective++; - } - variables.addAll(match.getMatch().getPatternVariables()); - } - } - } - - for (var operation : observationStrategy.getOperations()) { - if (operation.getObservable() != null) { - variables.addAll(operation.getObservable().getPatternVariables()); - } - } - - if ((nCollective == 0 && nNoncollective > 0) || (nCollective > 0 && nNoncollective == 0)) { - ret.collectiveConstraints = true; - ret.collectiveOnly = nCollective > 0; - ret.nonCollectiveOnly = nNoncollective > 0; + } + + private QuickSemanticFilter computeInfo(KimObservationStrategy observationStrategy) { + + Set variables = new HashSet<>(); + QuickSemanticFilter ret = new QuickSemanticFilter(); + + int nCollective = 0; + int nNoncollective = 0; + + for (var filter : observationStrategy.getFilters()) { + for (var match : filter) { + // TODO negation is much more complicated + if (match.getMatch() != null) { + if (match.isNegated()) { + ret.semanticTypesBlacklist.add( + SemanticType.fundamentalType(match.getMatch().getType())); + } else { + ret.semanticTypesWhitelist.add( + SemanticType.fundamentalType(match.getMatch().getType())); + } + 
if (match.getMatch().isCollective()) { + nCollective++; + } else { + nNoncollective++; + } + variables.addAll(match.getMatch().getPatternVariables()); } + } + } - ret.fixedVariablesUsed.addAll(variables); - ret.fixedVariablesUsed.retainAll(defaultVariables); - ret.customVariablesUsed.addAll(variables); - ret.customVariablesUsed.removeAll(defaultVariables); + for (var operation : observationStrategy.getOperations()) { + if (operation.getObservable() != null) { + variables.addAll(operation.getObservable().getPatternVariables()); + } + } - return ret; + if ((nCollective == 0 && nNoncollective > 0) || (nCollective > 0 && nNoncollective == 0)) { + ret.collectiveConstraints = true; + ret.collectiveOnly = nCollective > 0; + ret.nonCollectiveOnly = nNoncollective > 0; } - // public void loadWorldview(Worldview worldview) { - // for (var strategyDocument : worldview.getObservationStrategies()) { - // for (var strategy : strategyDocument.getStatements()) { - // observationStrategies.add(new ObservationStrategyImpl(strategy, reasoner)); - // } - // } - // - // this.observationStrategies.sort(Comparator.comparingInt(ObservationStrategy::rank)); - // } - - // - // public List inferStrategies(Observable observable, ContextScope scope) { - // - // List ret = new ArrayList<>(); - // - // /* - // * If observable is abstract due to abstract traits, strategy is to find a model - // * for each of the traits, then defer the resolution of a concretized observable - // * into an OR-joined meta-observable,which will use a merger model with all the - // * independent observables as dependencies. - // */ - // var generics = observable.getGenericComponents(); - // var resources = reasoner.serviceScope().getService(ResourcesService.class); - // var traits = observable.is(SemanticType.QUALITY) - // ? 
reasoner.directAttributes(observable) - // : reasoner.directTraits(observable); - // - // /* - // TODO with traits, we should switch off the direct resolution if the unmodified observation is - // available for the naked observable, and switch directly to trait resolution - // */ - // - // /** - // * FIXME check if the "one strategy at a time" technique works in all situations - // */ - //// int rank = 0; - //// if (generics.isEmpty() && !observable.isAbstract()) { - //// ret.addAll(getDirectConcreteStrategies(observable, scope, rank++)); - //// } - //// - //// // TODO deferred strategies for unary operators that have built-in dereifiers - //// // defer to the argument(s), add distance computation - //// ObservationStrategyObsolete opDeferred = null; - //// if (observable.is(SemanticType.DISTANCE)) { - //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner - // .describedType(observable))) - //// .withCost(rank++) - //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) null) - //// .build(); - //// } else if (observable.is(SemanticType.NUMEROSITY)) { - //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner - // .describedType(observable))) - //// .withCost(rank++) - //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) null) - //// .build(); - //// } else if (observable.is(SemanticType.PRESENCE)) { - //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner - // .describedType(observable))) - //// .withCost(rank++) - //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) null) - //// .build(); - //// } else if (observable.is(SemanticType.PERCENTAGE) || observable.is(SemanticType.PROPORTION)) { - ////// opDeferred = ObservationStrategy.builder(Observable.promote(reasoner.describedType - // (observable))) - ////// .withCost(rank++) - ////// .withOperation(ObservationStrategy.Operation.APPLY, (ServiceCall) null) - ////// .build(); - //// } else if (observable.is(SemanticType.RATIO)) { - ////// opDeferred = ObservationStrategy.builder(Observable.promote(reasoner.describedType - // (observable))) - ////// .withCost(rank++) - ////// .withOperation(ObservationStrategy.Operation.APPLY, (ServiceCall) null) - ////// .build(); - //// } - //// - //// if (opDeferred != null) { - //// ret.add(ObservationStrategyObsolete.builder(observable).withStrategy - // (ObservationStrategyObsolete.Operation.RESOLVE, opDeferred).withCost(rank).build()); - //// } - //// - //// if (!traits.isEmpty()) { - //// ret.addAll(getTraitConcreteStrategies(observable, traits, scope, rank++)); - //// } - //// - //// if (observable.is(SemanticType.QUALITY) && reasoner.directInherent(observable) != null) { - //// ret.addAll(getInherencyStrategies(observable, scope, rank++)); - //// } - //// - //// if (!observable.getValueOperators().isEmpty()) { - //// Observable withoutOperators = observable.builder(scope).withoutValueOperators().build(); - //// return addValueOperatorStrategies(inferStrategies(withoutOperators, scope), - //// observable.getValueOperators(), rank); - //// } - // - // - //// var traitStrategies = getTraitConcreteStrategies(ret, observable, traits); - //// - //// if (generics == null) { - //// ret.addAll(traitStrategies); - //// } else { - //// ret.addAll(getGenericConcreteStrategies(ret, observable, generics)); - //// } - //// - //// ret = insertSpecializedDeferralStrategies(ret, observable, scope); - // - // // TODO sort by rank - // - // return ret; - // - // } - 
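The QuickSemanticFilter machinery above exists so that most strategies are rejected before any pattern variable is instantiated: computeInfo() collects the fundamental semantic types mentioned by a strategy's filters, and match() then performs only cheap set intersections on them. A standalone sketch of that test, with hypothetical names, assuming the Sets helper used in the service code is Guava's (SemanticType import from the k.LAB API omitted):

import com.google.common.collect.Sets;
import java.util.Set;

class QuickFilterSketch {
  // Cheap pre-check: no inference, just set intersections on the fundamental semantic types.
  boolean quickMatch(
      Set<SemanticType> observableTypes,
      Set<SemanticType> whitelist,
      Set<SemanticType> blacklist) {
    if (!whitelist.isEmpty() && Sets.intersection(observableTypes, whitelist).isEmpty()) {
      return false; // none of the required types is present
    }
    if (!blacklist.isEmpty() && !Sets.intersection(observableTypes, blacklist).isEmpty()) {
      return false; // an excluded type is present
    }
    return true; // passed the cheap test; full filter and pattern matching can still reject it
  }
}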
// - // private List insertSpecializedDeferralStrategies - // (List ret, - // Observable observable, - // ContextScope scope, - // int rank) { - // // TODO - // return ret; - // } - // - // private List addValueOperatorStrategies - // (List ret, - // List> observable, int - // rank) { - // // TODO add new strategies to the previous one; increment their rank by 1 - // return ret; - // } - // - // /** - // * Inherency-based strategies are for qualities distributed to inherent contexts through - // of, - // * resolved by deferring the inherent objects with their inherent qualities and inserting an - // aggregating - // * core function for the main observable. - // * - // * @param observable - // * @param scope - // * @param rank - // * @return - // */ - // private List getInherencyStrategies(Observable observable, - // ContextScope scope, - // int rank) { - // // TODO - // return Collections.emptyList(); - // } - // - // /** - // * Indirect resolution of concrete traits in qualities and instances - // *

-    //     * For qualities: TODO
-    //     *
-    //     * For instances: solution for (e.g.) landcover:Urban infrastructure:City should be
-    //     *
-    //     * <pre>
-    //     * DEFER infrastructure:City [instantiation]
-    //     *      RESOLVE landcover:LandCoverType of infrastructure:City [classification]
-    //     *      APPLY filter(trait=landcover:Urban, artifact=infrastructure:City) // -> builds the
-    //     filtered view
-    //     * </pre>
-    //     *
-    //     * The solution for >1 traits, e.g. im:Big landcover:Urban infrastructure:City, simply resolves the first
-    //     * trait and leaves the other in the deferred observation:
-    //     * <pre>
-    //     * DEFER landcover:Urban infrastructure:City [instantiation]
-    //     *      RESOLVE im:SizeRelated of landcover:Urban infrastructure:City [classification]
-    //     *      APPLY klab.core.filter.objects(trait=im:Big, artifact=landcover:Urban infrastructure:City)
-    //     * </pre>
-    //     *
- // * as the recursion implicit in DEFER takes care of the strategy for landcover:Urban - // * - // * @param observable - // * @param traits - // * @param scope - // * @param rank - // * @return - // */ - // private List getTraitConcreteStrategies(Observable observable, - // Collection traits, - // Scope scope, - // int rank) { - // List ret = new ArrayList<>(); - // Concept toResolve = traits.iterator().next(); - // - // var nakedObservable = observable.builder(scope).without(toResolve).build(); - // var builder = ObservationStrategyObsolete.builder(observable).withCost(rank); - // - // // TODO this is the strategy for instances, not for qualities - // - // var deferred = ObservationStrategyObsolete.builder(nakedObservable).withCost(rank); - // var baseTrait = reasoner.baseParentTrait(toResolve); - // if (baseTrait == null) { - // throw new KlabInternalErrorException("no base trait for " + toResolve); - // } - // deferred - // .withOperation(ObservationStrategyObsolete.Operation.OBSERVE, - // Observable.promote(baseTrait).builder(scope).of(nakedObservable.getSemantics - // ()).build()); - // - // if (observable.is(SemanticType.QUALITY)) { - // - // // TODO probably not necessary, the model seems generic enough - // - // // The resolve above has produced a quality of x observation, we must resolve the quality - // // selectively - // // where that quality is our target - // // TODO defer to concrete dependencies using CONCRETIZE which creates the concrete deps and - // // applies - // // an implicit WHERE to their resolution; then APPLY an aggregator for the main - // // observation. NO - CONCRETIZE is for generic quality observables. Generic countable - // observables - // // remain one dependency, which triggers classification and then resolution of the - // individual - // // classes on - // // filtered groups. 
- //// deferred.withOperation(ObservationStrategy.Operation.CONCRETIZE, ) - // - // } else { - // deferred - // // filter the instances to set the ones with the trait in context - // .withOperation(ObservationStrategyObsolete.Operation.APPLY, - // // FIXME this must be the FILTER call to filter instances with toSolve as - // // arguments - // (ServiceCall) null) - // // Explain the instantiated classification, deferring the resolution of the - // attributed - // // trait within the instances - // .withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, - // ObservationStrategyObsolete.builder( - // Observable.promote(toResolve).builder(scope) - // .of(nakedObservable.getSemantics()) - // .optional(true).build()) - // .withCost(rank) - // .build()); - // } - // - // builder.withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, deferred.build()); - // - // ret.add(builder.build()); - // - // return ret; - // } - // - // private List getGenericConcreteStrategies - // (List strategies, - // Observable observable, - // Collection - // generics, int rank) { - // List ret = new ArrayList<>(); - // return ret; - // } - // - //// /** - //// * Direct strategies have rank 0 - //// */ - //// private Collection getDirectConcreteStrategies(Observable - // observable, - //// Scope - // scope, int rank) { - //// - //// List ret = new ArrayList<>(); - //// - //// /* - //// * first course of action for concrete observables is always direct observation (finding a - // model and - //// * contextualizing it) - //// */ - //// var builder = - //// ObservationStrategyObsolete.builder(observable) - //// .withCost(rank); - //// - //// /** - //// * If we are resolving a relationship, we need the targets of the relationship first of all - //// */ - //// if (observable.is(SemanticType.RELATIONSHIP)) { - //// for (var target : reasoner.relationshipTargets(observable)) { - //// builder.withOperation(ObservationStrategyObsolete.Operation.OBSERVE, Observable - // .promote(target)); - //// } - //// } - //// - //// // main target - //// builder.withOperation(ObservationStrategyObsolete.Operation.OBSERVE, observable); - //// - //// // defer resolution of the instances - //// if (observable.getDescriptionType() == DescriptionType.INSTANTIATION) { - //// builder.withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, - //// ObservationStrategyObsolete.builder(observable.builder(scope).as - // (DescriptionType.ACKNOWLEDGEMENT) - //// .optional(true).build()) - //// .withCost(rank) - //// .build()); - //// } - //// - //// ret.add(builder.build()); - //// - //// return ret; - //// } - // - //// /* - //// * these should be obtained from the classpath. Plug-ins may extend them. 
- //// */ - //// List strategies = new ArrayList<>(); - //// for( - //// ObservationStrategyPattern pattern :this.observationStrategyPatterns) - //// - //// { - //// if (pattern.matches(observable, scope)) { - //// strategies.add(pattern); - //// } - //// } - //// - //// if(!strategies.isEmpty()) - //// - //// { - //// strategies.sort(new Comparator<>() { - //// - //// @Override - //// public int compare(ObservationStrategyPattern o1, ObservationStrategyPattern o2) { - //// return Integer.compare(o1.getCost(observable, scope), o2.getCost(observable, scope)); - //// } - //// }); - //// for (ObservationStrategyPattern strategy : strategies) { - //// ret.add(strategy.getStrategy(observable, scope)); - //// } - //// } - //// - //// return ret; - ////} + ret.fixedVariablesUsed.addAll(variables); + ret.fixedVariablesUsed.retainAll(defaultVariables); + ret.customVariablesUsed.addAll(variables); + ret.customVariablesUsed.removeAll(defaultVariables); + + return ret; + } + + // public void loadWorldview(Worldview worldview) { + // for (var strategyDocument : worldview.getObservationStrategies()) { + // for (var strategy : strategyDocument.getStatements()) { + // observationStrategies.add(new ObservationStrategyImpl(strategy, reasoner)); + // } + // } + // + // this.observationStrategies.sort(Comparator.comparingInt(ObservationStrategy::rank)); + // } + + // + // public List inferStrategies(Observable observable, ContextScope scope) + // { + // + // List ret = new ArrayList<>(); + // + // /* + // * If observable is abstract due to abstract traits, strategy is to find a model + // * for each of the traits, then defer the resolution of a concretized observable + // * into an OR-joined meta-observable,which will use a merger model with all the + // * independent observables as dependencies. + // */ + // var generics = observable.getGenericComponents(); + // var resources = reasoner.serviceScope().getService(ResourcesService.class); + // var traits = observable.is(SemanticType.QUALITY) + // ? 
reasoner.directAttributes(observable) + // : reasoner.directTraits(observable); + // + // /* + // TODO with traits, we should switch off the direct resolution if the unmodified + // observation is + // available for the naked observable, and switch directly to trait resolution + // */ + // + // /** + // * FIXME check if the "one strategy at a time" technique works in all situations + // */ + //// int rank = 0; + //// if (generics.isEmpty() && !observable.isAbstract()) { + //// ret.addAll(getDirectConcreteStrategies(observable, scope, rank++)); + //// } + //// + //// // TODO deferred strategies for unary operators that have built-in dereifiers + //// // defer to the argument(s), add distance computation + //// ObservationStrategyObsolete opDeferred = null; + //// if (observable.is(SemanticType.DISTANCE)) { + //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner + // .describedType(observable))) + //// .withCost(rank++) + //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) + // null) + //// .build(); + //// } else if (observable.is(SemanticType.NUMEROSITY)) { + //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner + // .describedType(observable))) + //// .withCost(rank++) + //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) + // null) + //// .build(); + //// } else if (observable.is(SemanticType.PRESENCE)) { + //// opDeferred = ObservationStrategyObsolete.builder(Observable.promote(reasoner + // .describedType(observable))) + //// .withCost(rank++) + //// .withOperation(ObservationStrategyObsolete.Operation.APPLY, (ServiceCall) + // null) + //// .build(); + //// } else if (observable.is(SemanticType.PERCENTAGE) || + // observable.is(SemanticType.PROPORTION)) { + ////// opDeferred = + // ObservationStrategy.builder(Observable.promote(reasoner.describedType + // (observable))) + ////// .withCost(rank++) + ////// .withOperation(ObservationStrategy.Operation.APPLY, (ServiceCall) null) + ////// .build(); + //// } else if (observable.is(SemanticType.RATIO)) { + ////// opDeferred = + // ObservationStrategy.builder(Observable.promote(reasoner.describedType + // (observable))) + ////// .withCost(rank++) + ////// .withOperation(ObservationStrategy.Operation.APPLY, (ServiceCall) null) + ////// .build(); + //// } + //// + //// if (opDeferred != null) { + //// ret.add(ObservationStrategyObsolete.builder(observable).withStrategy + // (ObservationStrategyObsolete.Operation.RESOLVE, opDeferred).withCost(rank).build()); + //// } + //// + //// if (!traits.isEmpty()) { + //// ret.addAll(getTraitConcreteStrategies(observable, traits, scope, rank++)); + //// } + //// + //// if (observable.is(SemanticType.QUALITY) && reasoner.directInherent(observable) != + // null) { + //// ret.addAll(getInherencyStrategies(observable, scope, rank++)); + //// } + //// + //// if (!observable.getValueOperators().isEmpty()) { + //// Observable withoutOperators = + // observable.builder(scope).withoutValueOperators().build(); + //// return addValueOperatorStrategies(inferStrategies(withoutOperators, scope), + //// observable.getValueOperators(), rank); + //// } + // + // + //// var traitStrategies = getTraitConcreteStrategies(ret, observable, traits); + //// + //// if (generics == null) { + //// ret.addAll(traitStrategies); + //// } else { + //// ret.addAll(getGenericConcreteStrategies(ret, observable, generics)); + //// } + //// + //// ret = insertSpecializedDeferralStrategies(ret, observable, scope); + // + // // TODO 
sort by rank + // + // return ret; + // + // } + // + // private List insertSpecializedDeferralStrategies + // (List ret, + // Observable + // observable, + // ContextScope + // scope, + // int rank) { + // // TODO + // return ret; + // } + // + // private List addValueOperatorStrategies + // (List ret, + // + // List> observable, + // int + // rank) { + // // TODO add new strategies to the previous one; increment their rank by 1 + // return ret; + // } + // + // /** + // * Inherency-based strategies are for qualities distributed to inherent contexts through + // of, + // * resolved by deferring the inherent objects with their inherent qualities and inserting an + // aggregating + // * core function for the main observable. + // * + // * @param observable + // * @param scope + // * @param rank + // * @return + // */ + // private List getInherencyStrategies(Observable observable, + // ContextScope scope, + // int rank) { + // // TODO + // return Collections.emptyList(); + // } + // + // /** + // * Indirect resolution of concrete traits in qualities and instances + // *

+ // * For qualities: TODO + // *

+ // * For instances: solution for (e.g.) landcover:Urban infrastructure:City should be + // * + // *
<pre>
+  //     * DEFER infrastructure:City [instantiation]
+  //     *      RESOLVE landcover:LandCoverType of infrastructure:City [classification]
+  //     *      APPLY filter(trait=landcover:Urban, artifact=infrastructure:City) // -> builds the
+  //     *      filtered view
+  //     * </pre>
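// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): the DEFER / RESOLVE / APPLY plan shown
// in the example above, expressed with plain strings instead of the k.LAB strategy builders.
// All names below are hypothetical and only meant to make the shape of the plan concrete.
class TraitResolutionPlanSketch {
  /** Three-step plan for one concrete trait attributed to an instantiated observable. */
  static java.util.List<String> plan(String trait, String baseTrait, String observable) {
    return java.util.List.of(
        "DEFER " + observable + " [instantiation]",
        "RESOLVE " + baseTrait + " of " + observable + " [classification]",
        "APPLY filter(trait=" + trait + ", artifact=" + observable + ")");
  }

  public static void main(String[] args) {
    // mirrors the example above: landcover:Urban infrastructure:City
    plan("landcover:Urban", "landcover:LandCoverType", "infrastructure:City")
        .forEach(System.out::println);
  }
}
// ---------------------------------------------------------------------------------------------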
+ // *

+ // * The solution for >1 traits, e.g. im:Big landcover:Urban infrastructure:City, simply + // resolves + // the first + // * trait and leaves the other in the deferred observation: + // *
<pre>
+  //     * DEFER landcover:Urban infrastructure:City [instantiation]
+  //     *      RESOLVE im:SizeRelated of landcover:Urban infrastructure:City [classification]
+  //     *      APPLY klab.core.filter.objects(trait=im:Big, artifact=landcover:Urban
+  //     *      infrastructure:City)
+  //     * </pre>
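// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): with more than one trait only the first
// one is filtered here; the remaining traits stay on the deferred observable, so the same
// strategy applies again when the deferral is resolved. Plain strings, hypothetical names;
// indentation only suggests the nesting of the deferred resolutions.
class MultiTraitDeferralSketch {
  // Peels one trait per level of recursion and appends the resulting plan to 'out'.
  static void plan(java.util.List<String> traits, String observable, String indent, StringBuilder out) {
    if (traits.isEmpty()) {
      out.append(indent).append("OBSERVE ").append(observable).append('\n');
      return;
    }
    String first = traits.get(0);
    java.util.List<String> rest = traits.subList(1, traits.size());
    String deferred = (String.join(" ", rest) + " " + observable).trim();
    out.append(indent).append("DEFER ").append(deferred).append(" [instantiation]\n");
    plan(rest, observable, indent + "    ", out); // recursion implicit in the DEFER
    out.append(indent).append("    RESOLVE baseTraitOf(").append(first).append(") of ")
        .append(deferred).append(" [classification]\n");
    out.append(indent).append("    APPLY filter(trait=").append(first).append(", artifact=")
        .append(deferred).append(")\n");
  }

  public static void main(String[] args) {
    StringBuilder out = new StringBuilder();
    // im:Big landcover:Urban infrastructure:City: resolve im:Big first, defer the rest
    plan(java.util.List.of("im:Big", "landcover:Urban"), "infrastructure:City", "", out);
    System.out.print(out);
  }
}
// ---------------------------------------------------------------------------------------------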
+ // *

+ // * as the recursion implicit in DEFER takes care of the strategy for landcover:Urban + // * + // * @param observable + // * @param traits + // * @param scope + // * @param rank + // * @return + // */ + // private List getTraitConcreteStrategies(Observable observable, + // Collection + // traits, + // Scope scope, + // int rank) { + // List ret = new ArrayList<>(); + // Concept toResolve = traits.iterator().next(); + // + // var nakedObservable = observable.builder(scope).without(toResolve).build(); + // var builder = ObservationStrategyObsolete.builder(observable).withCost(rank); + // + // // TODO this is the strategy for instances, not for qualities + // + // var deferred = ObservationStrategyObsolete.builder(nakedObservable).withCost(rank); + // var baseTrait = reasoner.baseParentTrait(toResolve); + // if (baseTrait == null) { + // throw new KlabInternalErrorException("no base trait for " + toResolve); + // } + // deferred + // .withOperation(ObservationStrategyObsolete.Operation.OBSERVE, + // + // Observable.promote(baseTrait).builder(scope).of(nakedObservable.getSemantics + // ()).build()); + // + // if (observable.is(SemanticType.QUALITY)) { + // + // // TODO probably not necessary, the model seems generic enough + // + // // The resolve above has produced a quality of x observation, we must resolve the + // quality + // // selectively + // // where that quality is our target + // // TODO defer to concrete dependencies using CONCRETIZE which creates the concrete + // deps and + // // applies + // // an implicit WHERE to their resolution; then APPLY an aggregator for the main + // // observation. NO - CONCRETIZE is for generic quality observables. Generic + // countable + // observables + // // remain one dependency, which triggers classification and then resolution of the + // individual + // // classes on + // // filtered groups. 
+ //// deferred.withOperation(ObservationStrategy.Operation.CONCRETIZE, ) + // + // } else { + // deferred + // // filter the instances to set the ones with the trait in context + // .withOperation(ObservationStrategyObsolete.Operation.APPLY, + // // FIXME this must be the FILTER call to filter instances with + // toSolve as + // // arguments + // (ServiceCall) null) + // // Explain the instantiated classification, deferring the resolution of the + // attributed + // // trait within the instances + // .withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, + // ObservationStrategyObsolete.builder( + // Observable.promote(toResolve).builder(scope) + // .of(nakedObservable.getSemantics()) + // .optional(true).build()) + // .withCost(rank) + // .build()); + // } + // + // builder.withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, deferred.build()); + // + // ret.add(builder.build()); + // + // return ret; + // } + // + // private List getGenericConcreteStrategies + // (List strategies, + // Observable + // observable, + // Collection + // generics, int rank) { + // List ret = new ArrayList<>(); + // return ret; + // } + // + //// /** + //// * Direct strategies have rank 0 + //// */ + //// private Collection getDirectConcreteStrategies(Observable + // observable, + //// + // Scope + // scope, int rank) { + //// + //// List ret = new ArrayList<>(); + //// + //// /* + //// * first course of action for concrete observables is always direct observation + // (finding a + // model and + //// * contextualizing it) + //// */ + //// var builder = + //// ObservationStrategyObsolete.builder(observable) + //// .withCost(rank); + //// + //// /** + //// * If we are resolving a relationship, we need the targets of the relationship first + // of all + //// */ + //// if (observable.is(SemanticType.RELATIONSHIP)) { + //// for (var target : reasoner.relationshipTargets(observable)) { + //// builder.withOperation(ObservationStrategyObsolete.Operation.OBSERVE, + // Observable + // .promote(target)); + //// } + //// } + //// + //// // main target + //// builder.withOperation(ObservationStrategyObsolete.Operation.OBSERVE, observable); + //// + //// // defer resolution of the instances + //// if (observable.getDescriptionType() == DescriptionType.INSTANTIATION) { + //// builder.withStrategy(ObservationStrategyObsolete.Operation.RESOLVE, + //// ObservationStrategyObsolete.builder(observable.builder(scope).as + // (DescriptionType.ACKNOWLEDGEMENT) + //// .optional(true).build()) + //// .withCost(rank) + //// .build()); + //// } + //// + //// ret.add(builder.build()); + //// + //// return ret; + //// } + // + //// /* + //// * these should be obtained from the classpath. Plug-ins may extend them. 
+ //// */ + //// List strategies = new ArrayList<>(); + //// for( + //// ObservationStrategyPattern pattern :this.observationStrategyPatterns) + //// + //// { + //// if (pattern.matches(observable, scope)) { + //// strategies.add(pattern); + //// } + //// } + //// + //// if(!strategies.isEmpty()) + //// + //// { + //// strategies.sort(new Comparator<>() { + //// + //// @Override + //// public int compare(ObservationStrategyPattern o1, ObservationStrategyPattern o2) { + //// return Integer.compare(o1.getCost(observable, scope), o2.getCost(observable, + // scope)); + //// } + //// }); + //// for (ObservationStrategyPattern strategy : strategies) { + //// ret.add(strategy.getStrategy(observable, scope)); + //// } + //// } + //// + //// return ret; + //// } } diff --git a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ReasonerService.java b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ReasonerService.java index 8b5ad2b74..a63abfc0f 100644 --- a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ReasonerService.java +++ b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/ReasonerService.java @@ -77,2849 +77,2993 @@ @Service public class ReasonerService extends BaseService implements Reasoner, Reasoner.Admin { - @Serial - private static final long serialVersionUID = 380622027752591182L; - - /** - * Flag for {@link #compatible(Semantics, Semantics, int)}. - *

- * If passed to {@link #compatible(Semantics, Semantics, int)}, different realms will not determine - * incompatibility. - */ - static public final int ACCEPT_REALM_DIFFERENCES = 0x01; + @Serial private static final long serialVersionUID = 380622027752591182L; + + /** + * Flag for {@link #compatible(Semantics, Semantics, int)}. + * + *

If passed to {@link #compatible(Semantics, Semantics, int)}, different realms will not + * determine incompatibility. + */ + public static final int ACCEPT_REALM_DIFFERENCES = 0x01; + + /** + * Flag for {@link #compatible(Semantics, Semantics, int)}. + * + *

If passed to {@link #compatible(Semantics, Semantics, int)}, only types that have the exact + * same core type will be accepted. + */ + public static final int REQUIRE_SAME_CORE_TYPE = 0x02; + + /** + * Flag for {@link #compatible(Semantics, Semantics, int)}. + * + *

If passed to {@link #compatible(Semantics, Semantics, int)}, types with roles that are more + * general of the roles in the first concept will be accepted. + */ + public static final int USE_ROLE_PARENT_CLOSURE = 0x04; + + /** + * Flag for {@link #compatible(Semantics, Semantics, int)}. + * + *

If passed to {@link #compatible(Semantics, Semantics, int)}, types with traits that are more + * general of the traits in the first concept will be accepted. + */ + public static final int USE_TRAIT_PARENT_CLOSURE = 0x08; + + // /** + // * Flag for {@link #compatible(Semantics, Semantics, int)}. + // *

+ // * If passed to {@link #compatible(Semantics, Semantics, int)} causes acceptance of + // subjective + // traits + // for + // * observables. + // */ + // static public final int ACCEPT_SUBJECTIVE_OBSERVABLES = 0x10; + + private AtomicBoolean consistent = new AtomicBoolean(false); + private ReasonerConfiguration configuration = new ReasonerConfiguration(); + private Map coreConceptPeers = new HashMap<>(); + private Map emergent = new HashMap<>(); + private IntelligentMap> emergence; + // TODO fill in from classpath + private Map concepts = Collections.synchronizedMap(new HashMap<>()); + private Map observables = Collections.synchronizedMap(new HashMap<>()); + private ObservationReasoner observationReasoner; + private Worldview worldview; + private SyntacticMatcher syntacticMatcher; + + // /** + // * Caches for concepts and observables, linked to the URI in the corresponding + // {@link + // KimScope}. + // */ + // private LoadingCache concepts = CacheBuilder.newBuilder() + // // .expireAfterAccess(10, TimeUnit.MINUTES) + // .build(new CacheLoader(){ + // public Concept load(String key) { + // KimConcept parsed = + // scope.getService(ResourcesService.class).resolveConcept(key); + // return declareConcept(parsed); + // } + // }); + // + // private LoadingCache observables = + // CacheBuilder.newBuilder() + // // .expireAfterAccess(10, TimeUnit.MINUTES) + // .build(new CacheLoader(){ + // public Observable load(String key) { // no checked exception + // KimObservable parsed = + // scope.getService(ResourcesService.class).resolveObservable(key); + // return declareObservable(parsed); + // } + // }); + + Indexer indexer; + + /** + * Cache for ongoing requests expires in 10 minutes. CHECK this may be less and become + * configurable. + */ + private Cache semanticExpressions = + CacheBuilder.newBuilder().expireAfterAccess(10, TimeUnit.MINUTES).build(); + + private OWL owl; + private String hardwareSignature = Utils.Names.getHardwareId(); + static Pattern internalConceptPattern = Pattern.compile("[A-Z]+_[0-9]+"); + + public boolean derived(Semantics c) { + return internalConceptPattern.matcher(c.getName()).matches(); + } + + public OWL owl() { + return owl; + } + + /** + * An emergence is the appearance of an observation triggered by another, under the assumptions + * stated in the worldview. It applies to processes and relationships and its emergent observable + * can be a configuration, subject or process. 
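// ---------------------------------------------------------------------------------------------
// Editor's note (illustrative, not part of the patch): the compatibility flags declared earlier
// in this class are independent bits intended to be OR-ed into the int argument of
// compatible(Semantics, Semantics, int). The constants below merely mirror their values to show
// the combination and the mask test; real callers would use the ReasonerService fields directly.
class CompatibilityFlagsSketch {
  static final int ACCEPT_REALM_DIFFERENCES = 0x01;
  static final int REQUIRE_SAME_CORE_TYPE = 0x02;
  static final int USE_ROLE_PARENT_CLOSURE = 0x04;
  static final int USE_TRAIT_PARENT_CLOSURE = 0x08;

  public static void main(String[] args) {
    int flags = ACCEPT_REALM_DIFFERENCES | USE_TRAIT_PARENT_CLOSURE; // 0x09
    boolean traitClosure = (flags & USE_TRAIT_PARENT_CLOSURE) != 0;  // how a single flag is tested
    System.out.println(Integer.toHexString(flags) + " " + traitClosure);
  }
}
// ---------------------------------------------------------------------------------------------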
+ * + * @author Ferd + */ + public class Emergence { + + public Set triggerObservables = new LinkedHashSet<>(); + public Concept emergentObservable; + public String namespaceId; + + public Set matches(Concept relationship, ContextScope scope) { + + for (Concept trigger : triggerObservables) { + Set ret = new HashSet<>(); + checkScope(trigger, makeObservationCatalog(scope), relationship, ret); + if (!ret.isEmpty()) { + return ret; + } + } + + return Collections.emptySet(); + } + + private Map makeObservationCatalog(ContextScope scope) { + Map ret = new HashMap<>(); + for (var observation : scope.query(Observation.class)) { + ret.put(observation.getObservable(), observation); + } + return ret; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + getEnclosingInstance().hashCode(); + result = prime * result + Objects.hash(emergentObservable, namespaceId, triggerObservables); + return result; + } + + private Object getEnclosingInstance() { + return ReasonerService.this; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + Emergence other = (Emergence) obj; + if (!getEnclosingInstance().equals(other.getEnclosingInstance())) return false; + return Objects.equals(emergentObservable, other.emergentObservable) + && Objects.equals(namespaceId, other.namespaceId) + && Objects.equals(triggerObservables, other.triggerObservables); + } - /** - * Flag for {@link #compatible(Semantics, Semantics, int)}. - *

- * If passed to {@link #compatible(Semantics, Semantics, int)}, only types that have the exact same core - * type will be accepted. + /* + * current observable must be one of the triggers, any others need to be in + * scope */ - static public final int REQUIRE_SAME_CORE_TYPE = 0x02; + private void checkScope( + Concept trigger, + Map map, + Concept relationship, + Set obs) { + if (trigger.is(SemanticType.UNION)) { + for (Concept trig : operands(trigger)) { + checkScope(trig, map, relationship, obs); + } + } else if (trigger.is(SemanticType.INTERSECTION)) { + for (Concept trig : operands(trigger)) { + Set oobs = new HashSet<>(); + checkScope(trig, map, relationship, oobs); + if (oobs.isEmpty()) { + obs = oobs; + } + } + } else { + Observation a = map.get(trigger); + if (a != null) { + obs.add(a); + } + } + } + } + + @Autowired + public ReasonerService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { + super(scope, Type.REASONER, options); + this.scope = scope; + this.owl = new OWL(scope); + this.indexer = new Indexer(scope); + this.emergence = new IntelligentMap<>(scope); + ServiceConfiguration.INSTANCE.setMainService(this); + readConfiguration(options); + } + + private void readConfiguration(ServiceStartupOptions options) { + File config = BaseService.getFileInConfigurationDirectory(options, "reasoner.yaml"); + if (config.exists() && config.length() > 0 && !options.isClean()) { + this.configuration = + org.integratedmodelling.common.utils.Utils.YAML.load(config, ReasonerConfiguration.class); + } else { + // make an empty config + this.configuration = new ReasonerConfiguration(); + // this.configuration.setServicePath("resources"); + // this.configuration.setLocalResourcePath("local"); + // this.configuration.setPublicResourcePath("public"); + this.configuration.setServiceId(UUID.randomUUID().toString()); + saveConfiguration(); + } + } + + @Override + public void initializeService() { + + Logging.INSTANCE.setSystemIdentifier("Reasoner service: "); + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceInitializing, + capabilities(serviceScope())); + + for (ProjectConfiguration authority : configuration.getAuthorities()) { + loadAuthority(authority); + } + + this.observationReasoner = new ObservationReasoner(this); + this.syntacticMatcher = + new SyntacticMatcher(this, serviceScope().getService(ResourcesService.class)); - /** - * Flag for {@link #compatible(Semantics, Semantics, int)}. - *

- * If passed to {@link #compatible(Semantics, Semantics, int)}, types with roles that are more general of - * the roles in the first concept will be accepted. + /* + * Setup an embedded broker, possibly to be shared with other services, if we're local and there + * is no configured broker. */ - static public final int USE_ROLE_PARENT_CLOSURE = 0x04; + if (Utils.URLs.isLocalHost(this.getUrl()) && this.configuration.getBrokerURI() == null) { + Logging.INSTANCE.info("Setting up embedded broker in local service"); + this.embeddedBroker = new EmbeddedBroker(); + Logging.INSTANCE.info( + "Embedded broker is " + + (embeddedBroker.isOnline() ? ("online at " + embeddedBroker.getURI()) : "offline")); + } - /** - * Flag for {@link #compatible(Semantics, Semantics, int)}. - *

- * If passed to {@link #compatible(Semantics, Semantics, int)}, types with traits that are more general of - * the traits in the first concept will be accepted. + /* + This is called when resources are available, so this is the time to load the worldview. */ - static public final int USE_TRAIT_PARENT_CLOSURE = 0x08; - - // /** - // * Flag for {@link #compatible(Semantics, Semantics, int)}. - // *

- // * If passed to {@link #compatible(Semantics, Semantics, int)} causes acceptance of subjective - // traits - // for - // * observables. - // */ - // static public final int ACCEPT_SUBJECTIVE_OBSERVABLES = 0x10; - - private AtomicBoolean consistent = new AtomicBoolean(false); - private ReasonerConfiguration configuration = new ReasonerConfiguration(); - private Map coreConceptPeers = new HashMap<>(); - private Map emergent = new HashMap<>(); - private IntelligentMap> emergence; - // TODO fill in from classpath - private Map concepts = Collections.synchronizedMap(new HashMap<>()); - private Map observables = Collections.synchronizedMap(new HashMap<>()); - private ObservationReasoner observationReasoner; - private Worldview worldview; - private SyntacticMatcher syntacticMatcher; - - // /** - // * Caches for concepts and observables, linked to the URI in the corresponding - // {@link - // KimScope}. + for (var resources : serviceScope().getServices(ResourcesService.class)) { + if (resources.status().isAvailable() + && resources.capabilities(serviceScope()).isWorldviewProvider()) { + + var notifications = loadKnowledge(resources.getWorldview(), serviceScope()); + + if (!org.integratedmodelling.klab.api.utils.Utils.Resources.hasErrors(notifications)) { + // setOperational(false); + // serviceScope().warn("Worldview loading failed: reasoner is + // disabled"); + // } else { + setOperational(true); + serviceScope().info("Worldview loaded into local reasoner"); + + // TODO if there were previous logical notifications they should be deleted now + + /* + We stop at the first worldview that loads. All available worldiews should be + synchronized and mirrored automatically, and two services with different worldviews + accessible to the same reasoner is a configuration abomination that should never happen. + */ + break; + } + } + } + + // TODO keep logical notifications around for the capabilities, or have a separate status call + // for + // notifications. + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceAvailable, + capabilities(serviceScope())); + } + + @Override + public boolean operationalizeService() { + // we have done what we needed, just return the outcome. Basically we're not operational unless + // we + // have a valid worldview. + return isOperational(); + } + + @SuppressWarnings("unchecked") + private void loadAuthority(ProjectConfiguration authority) { + if (authority.getUrl().startsWith("classpath:")) { + try { + Logging.INSTANCE.info( + "loading authority " + authority.getProject() + " from local " + "classpath"); + Class cls = + (Class) + Class.forName(authority.getUrl().substring(("classpath" + ":").length())); + ServiceConfiguration.INSTANCE.registerAuthority(cls.getDeclaredConstructor().newInstance()); + Logging.INSTANCE.info( + "Authority " + + authority.getProject() + + " ready for " + + (authority.isServe() ? 
"global" : "local") + + " use"); + } catch (Exception e) { + Logging.INSTANCE.error(e); + } + } + // TODO Auto-generated method stub + + } + + private void saveConfiguration() { + File config = BaseService.getFileInConfigurationDirectory(startupOptions, "reasoner.yaml"); + org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + } + + @Override + public Concept defineConcept(KimConceptStatement statement, Scope scope) { + return build( + statement, + this.owl.requireOntology(statement.getNamespace(), OWL.DEFAULT_ONTOLOGY_PREFIX), + null, + scope); + } + + @Override + public boolean scopesAreReactive() { + return false; + } + + @Override + public ServiceStatus status() { + var ret = super.status(); + if (ret instanceof ServiceStatusImpl serviceStatus) { + serviceStatus.setConsistent(this.consistent.get()); + } + return ret; + } + + @Override + public Concept resolveConcept(String definition) { + Concept ret = concepts.get(definition); + if (ret == null) { + KimConcept parsed = scope.getService(ResourcesService.class).resolveConcept(definition); + if (parsed != null) { + ret = declareConcept(parsed); + concepts.put(definition, ret); + } else { + // TODO add an error concept in case of errors or null + } + } + return ret; + } + + @Override + public Observable resolveObservable(String definition) { + Observable ret = observables.get(definition); + if (ret == null) { + KimObservable parsed = scope.getService(ResourcesService.class).resolveObservable(definition); + if (parsed != null) { + ret = declareObservable(parsed); + observables.put(definition, ret); + } else { + // TODO add an error observable in case of errors or null + } + } + return ret; + } + + private Observable errorObservable(String definition) { + // TODO Auto-generated method stub + return null; + } + + private Concept errorConcept(String definition) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Collection operands(Semantics target) { + List ret = new ArrayList<>(); + if (target.is(SemanticType.UNION) || target.is(SemanticType.INTERSECTION)) { + ret.addAll(this.owl.getOperands(target.asConcept())); + } else { + ret.add(target.asConcept()); + } + + return ret; + } + + @Override + public Collection children(Semantics target) { + return this.owl.getChildren(target.asConcept()); + } + + public Map> emergentResolvables( + Observation trigger, ContextScope scope) { + + Map> ret = new HashMap<>(); + Collection emergents = this.emergence.get(trigger.getObservable().getSemantics()); + + // if (!(scope instanceof IRuntimeScope) || ((IRuntimeScope) + // scope).getActuator() == null) { + // return Collections.emptyMap(); + // } + // + // Mode mode = ((IRuntimeScope) scope).getActuator().getMode(); + // + // /* + // * Skip a search in the map if we can't trigger anything. 
+ // */ + // if (!trigger.getObservable().is(Type.QUALITY) + // && !(trigger.getObservable().is(Type.RELATIONSHIP) && mode == + // Mode.INSTANTIATION)) { + // return Collections.emptyMap(); + // } + // + // Map> ret = new HashMap<>(); + // Collection emergents = + // this.emergence.get(trigger.getObservable().getType()); + // + // if (emergents != null) { + // + // for (Emergence emergent : emergents) { + // + // Collection match = + // emergent.matches(trigger.getObservable().getType(), + // (IRuntimeScope) scope); + // + // /* + // * if process or configuration, update and skip if the scope already contains + // * the emergent observation + // */ + // if (emergent.emergentObservable.is(Type.PROCESS) + // || emergent.emergentObservable.is(Type.CONFIGURATION)) { + // if (((IRuntimeScope) scope).getCatalog() + // .get(new ObservedConcept(emergent.emergentObservable)) != null) { + // /* + // * TODO update with the new observation(s)! API to be defined // */ - // private LoadingCache concepts = CacheBuilder.newBuilder() - // // .expireAfterAccess(10, TimeUnit.MINUTES) - // .build(new CacheLoader(){ - // public Concept load(String key) { - // KimConcept parsed = - // scope.getService(ResourcesService.class).resolveConcept(key); - // return declareConcept(parsed); + // if (((IDirectObservation) trigger).getOriginatingPattern() != null) { + // ((IDirectObservation) trigger).getOriginatingPattern().update(trigger); + // return ret; + // } + // } // } - // }); // - // private LoadingCache observables = - // CacheBuilder.newBuilder() - // // .expireAfterAccess(10, TimeUnit.MINUTES) - // .build(new CacheLoader(){ - // public Observable load(String key) { // no checked exception - // KimObservable parsed = - // scope.getService(ResourcesService.class).resolveObservable(key); - // return declareObservable(parsed); + // ret.put(emergent.emergentObservable, match); // } - // }); - - Indexer indexer; - - /** - * Cache for ongoing requests expires in 10 minutes. CHECK this may be less and become configurable. - */ - private Cache semanticExpressions = - CacheBuilder.newBuilder().expireAfterAccess(10, TimeUnit.MINUTES).build(); - private OWL owl; - private String hardwareSignature = Utils.Names.getHardwareId(); - static Pattern internalConceptPattern = Pattern.compile("[A-Z]+_[0-9]+"); - - public boolean derived(Semantics c) { - return internalConceptPattern.matcher(c.getName()).matches(); - } - - public OWL owl() { - return owl; - } + // } + return ret; + } - /** - * An emergence is the appearance of an observation triggered by another, under the assumptions stated in - * the worldview. It applies to processes and relationships and its emergent observable can be a - * configuration, subject or process. 
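// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): the trigger-matching rule used by
// Emergence.checkScope(), reduced to plain string sets. A UNION trigger matches when any operand
// is already observed in the scope; an INTERSECTION trigger only when all operands are. The
// observable names are made up for the example.
class EmergenceTriggerSketch {
  static boolean matches(java.util.List<String> operands, boolean union, java.util.Set<String> observed) {
    return union
        ? operands.stream().anyMatch(observed::contains)
        : observed.containsAll(operands);
  }

  public static void main(String[] args) {
    var observed = java.util.Set.of("hydrology:Precipitation", "geography:Elevation");
    var trigger = java.util.List.of("hydrology:Precipitation", "im:Temperature");
    System.out.println(matches(trigger, true, observed));  // true: one operand is in scope
    System.out.println(matches(trigger, false, observed)); // false: not all operands are in scope
  }
}
// ---------------------------------------------------------------------------------------------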
- * - * @author Ferd - */ - public class Emergence { + @Override + public Collection parents(Semantics target) { + return this.owl.getParents(target.asConcept()); + } - public Set triggerObservables = new LinkedHashSet<>(); - public Concept emergentObservable; - public String namespaceId; + @Override + public Collection allChildren(Semantics target) { - public Set matches(Concept relationship, ContextScope scope) { + Set ret = collectChildren(target, new HashSet()); + ret.add(target.asConcept()); - for (Concept trigger : triggerObservables) { - Set ret = new HashSet<>(); - checkScope(trigger, makeObservationCatalog(scope), relationship, ret); - if (!ret.isEmpty()) { - return ret; - } - } + return ret; + } - return Collections.emptySet(); - } + private Set collectChildren(Semantics target, Set hashSet) { - private Map makeObservationCatalog(ContextScope scope) { - Map ret = new HashMap<>(); - for (var observation : scope.query(Observation.class)) { - ret.put(observation.getObservable(), observation); - } - return ret; - } + for (Concept c : children(target)) { + if (!hashSet.contains(c)) collectChildren(c, hashSet); + hashSet.add(c); + } + return hashSet; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + getEnclosingInstance().hashCode(); - result = prime * result + Objects.hash(emergentObservable, namespaceId, triggerObservables); - return result; - } + @Override + public Collection allParents(Semantics target) { + return allParentsInternal(target, new HashSet()); + } - private Object getEnclosingInstance() { - return ReasonerService.this; - } + private Collection allParentsInternal(Semantics target, Set seen) { - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - Emergence other = (Emergence) obj; - if (!getEnclosingInstance().equals(other.getEnclosingInstance())) return false; - return Objects.equals(emergentObservable, other.emergentObservable) && Objects.equals(namespaceId, other.namespaceId) && Objects.equals(triggerObservables, other.triggerObservables); - } + Set concepts = new HashSet<>(); - /* - * current observable must be one of the triggers, any others need to be in - * scope - */ - private void checkScope(Concept trigger, Map map, Concept relationship, - Set obs) { - if (trigger.is(SemanticType.UNION)) { - for (Concept trig : operands(trigger)) { - checkScope(trig, map, relationship, obs); - } - } else if (trigger.is(SemanticType.INTERSECTION)) { - for (Concept trig : operands(trigger)) { - Set oobs = new HashSet<>(); - checkScope(trig, map, relationship, oobs); - if (oobs.isEmpty()) { - obs = oobs; - } - } - } else { - Observation a = map.get(trigger); - if (a != null) { - obs.add(a); - } - } - } + if (seen.contains(target)) { + return concepts; } - @Autowired - public ReasonerService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { - super(scope, Type.REASONER, options); - this.scope = scope; - this.owl = new OWL(scope); - this.indexer = new Indexer(scope); - this.emergence = new IntelligentMap<>(scope); - ServiceConfiguration.INSTANCE.setMainService(this); - readConfiguration(options); - } + seen.add(target.asConcept()); - private void readConfiguration(ServiceStartupOptions options) { - File config = BaseService.getFileInConfigurationDirectory(options, "reasoner.yaml"); - if (config.exists() && config.length() > 0 && !options.isClean()) { - this.configuration = 
org.integratedmodelling.common.utils.Utils.YAML.load(config, - ReasonerConfiguration.class); - } else { - // make an empty config - this.configuration = new ReasonerConfiguration(); - // this.configuration.setServicePath("resources"); - // this.configuration.setLocalResourcePath("local"); - // this.configuration.setPublicResourcePath("public"); - this.configuration.setServiceId(UUID.randomUUID().toString()); - saveConfiguration(); - } + for (Concept c : parents(target)) { + concepts.add(c); + concepts.addAll(allParentsInternal(c, seen)); } - @Override - public void initializeService() { - - Logging.INSTANCE.setSystemIdentifier("Reasoner service: "); - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceInitializing, - capabilities(serviceScope())); - - for (ProjectConfiguration authority : configuration.getAuthorities()) { - loadAuthority(authority); - } - - this.observationReasoner = new ObservationReasoner(this); - this.syntacticMatcher = new SyntacticMatcher(this, serviceScope().getService(ResourcesService.class)); + return concepts; + } - /* - * Setup an embedded broker, possibly to be shared with other services, if we're local and there - * is no configured broker. - */ - if (Utils.URLs.isLocalHost(this.getUrl()) && this.configuration.getBrokerURI() == null) { - Logging.INSTANCE.info("Setting up embedded broker in local service"); - this.embeddedBroker = new EmbeddedBroker(); - Logging.INSTANCE.info("Embedded broker is " + (embeddedBroker.isOnline() ? - ("online at " + embeddedBroker.getURI()) : - "offline")); - } - - /* - This is called when resources are available, so this is the time to load the worldview. - */ - for (var resources : serviceScope().getServices(ResourcesService.class)) { - if (resources.status().isAvailable() && resources.capabilities(serviceScope()).isWorldviewProvider()) { + @Override + public Collection closure(Semantics target) { + return this.owl.getSemanticClosure(target.asConcept()); + } - var notifications = loadKnowledge(resources.getWorldview(), serviceScope()); + @Override + public boolean resolves(Semantics toResolve, Semantics other, Semantics context) { - if (!org.integratedmodelling.klab.api.utils.Utils.Resources.hasErrors(notifications)) { - // setOperational(false); - // serviceScope().warn("Worldview loading failed: reasoner is - // disabled"); - // } else { - setOperational(true); - serviceScope().info("Worldview loaded into local reasoner"); - - // TODO if there were previous logical notifications they should be deleted now + /* + TODO if these are observables, the observer also must be considered and matched by semantic distance. - /* - We stop at the first worldview that loads. All available worldiews should be - synchronized and mirrored automatically, and two services with different worldviews - accessible to the same reasoner is a configuration abomination that should never happen. - */ - break; + TODO the observable now just carries an Observer identity but NOT a contract about the type of + observer it + accepts. This is necessary for models to be able to declare their observers before they are actually + observed. - } - } - } + In each observable, the actual Observer (if semantic) takes over the contract when matching. This, + with the + fact that the observer may be a mere Identity, makes for a pretty complicated comparison. - // TODO keep logical notifications around for the capabilities, or have a separate status call for - // notifications. 
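// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): the load-or-create-default pattern used
// by this service's readConfiguration()/saveConfiguration(), shown with java.util.Properties as
// a stand-in for the YAML-mapped ReasonerConfiguration. File name and keys are hypothetical.
class ConfigBootstrapSketch {
  static java.util.Properties readOrCreate(java.io.File file, boolean clean) throws java.io.IOException {
    var props = new java.util.Properties();
    if (file.exists() && file.length() > 0 && !clean) {
      try (var in = new java.io.FileInputStream(file)) {
        props.load(in); // an existing configuration wins unless a clean start was requested
      }
    } else {
      // empty configuration: assign a fresh service id and persist it immediately
      props.setProperty("serviceId", java.util.UUID.randomUUID().toString());
      try (var out = new java.io.FileOutputStream(file)) {
        props.store(out, "reasoner configuration (sketch)");
      }
    }
    return props;
  }

  public static void main(String[] args) throws java.io.IOException {
    java.io.File file = java.io.File.createTempFile("reasoner", ".properties");
    file.delete(); // start from a missing file to exercise the create-default branch
    System.out.println(readOrCreate(file, false).getProperty("serviceId"));
  }
}
// ---------------------------------------------------------------------------------------------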
- serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceAvailable, - capabilities(serviceScope())); + The observer instance is an Identity - if that's also a DirectObservation use semantics, otherwise + match with + equals(). - } + An incoming without observer will match one with, but not the other way around unless the observers + are + compatible. + */ - @Override - public boolean operationalizeService() { - // we have done what we needed, just return the outcome. Basically we're not operational unless we - // have a valid worldview. - return isOperational(); - } - - @SuppressWarnings("unchecked") - private void loadAuthority(ProjectConfiguration authority) { - if (authority.getUrl().startsWith("classpath:")) { - try { - Logging.INSTANCE.info("loading authority " + authority.getProject() + " from local " + - "classpath"); - Class cls = - (Class) Class.forName(authority.getUrl().substring(("classpath" + - ":").length())); - ServiceConfiguration.INSTANCE.registerAuthority(cls.getDeclaredConstructor().newInstance()); - Logging.INSTANCE.info("Authority " + authority.getProject() + " ready for " + (authority.isServe() ? "global" : "local") + " use"); - } catch (Exception e) { - Logging.INSTANCE.error(e); - } + return semanticDistance(toResolve, other, context) >= 0; + } + + @Override + public int semanticDistance(Semantics target, Semantics other) { + return semanticDistance(target.asConcept(), other.asConcept(), null, true, null); + } + + @Override + public int semanticDistance(Semantics target, Semantics other, Semantics context) { + return semanticDistance( + target.asConcept(), + other.asConcept(), + context == null ? null : context.asConcept(), + true, + null); + } + + /** + * The workhorse of semantic distance computation can also consider any predicates that were + * abstract in the lineage of the passed concept (i.e. the concept is the result of a query with + * the abstract predicates, which has been contextualized to incarnate them into the passed + * correspondence with concrete counterparts). In that case, and only in that case, the distance + * between a concrete candidate and one that contains its predicates in the abstract form can be + * positive, i.e. a concept with abstract predicates can resolve one with concrete subclasses as + * long as the lineage contains its resolution. 
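// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): the overall shape of the distance
// computation described above, reduced to integer components. The core distance is weighted by
// 50, each additional semantic role contributes its own component, and any negative component
// aborts the comparison with a scaled negative result, which callers read as "does not resolve".
// The constants mirror the ones used in the real method; everything else is simplified.
class SemanticDistanceSketch {
  static int distance(int coreDistance, int[] componentDistances) {
    int distance = coreDistance * 50;
    if (distance < 0) {
      return distance; // incompatible core observables
    }
    for (int component : componentDistances) {
      if (component < 0) {
        double d = component / 10.0;
        return -1 * (int) (d > 10 ? d : 10); // negative component: give up, scaled
      }
      distance += component;
    }
    return distance;
  }

  public static void main(String[] args) {
    System.out.println(distance(0, new int[] {0, 10, 0}));  // 10: resolvable with a small penalty
    System.out.println(distance(0, new int[] {0, -50, 0})); // -10: one role mismatch, incompatible
  }
}
// ---------------------------------------------------------------------------------------------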
+ * + * @param to + * @param context + * @param compareInherency + * @param resolvedAbstractPredicates + * @return + */ + public int semanticDistance( + Concept from, + Concept to, + Concept context, + boolean compareInherency, + Map resolvedAbstractPredicates) { + + int distance = 0; + + // String resolving = this.getDefinition(); + // String resolved = concept.getDefinition(); + // System.out.println("Does " + resolving + " resolve " + resolved + "?"); + + int mainDistance = + coreDistance(from, to, context, compareInherency, resolvedAbstractPredicates); + distance += mainDistance * 50; + if (distance < 0) { + return distance; + } + + // should have all the same traits - additional traits are allowed only + // in contextual types + Set acceptedTraits = new HashSet<>(); + for (Concept t : traits(from)) { + if (t.isAbstract() + && resolvedAbstractPredicates != null + && resolvedAbstractPredicates.containsKey(t)) { + distance += assertedDistance(resolvedAbstractPredicates.get(t), t); + acceptedTraits.add(resolvedAbstractPredicates.get(t)); + } else { + boolean ok = hasTrait(to, t); + if (!ok) { + return -50; + } + } + } + + for (Concept t : traits(to)) { + if (!acceptedTraits.contains(t) && !hasTrait(from, t)) { + return -50; + } + } + + // same with roles. + Set acceptedRoles = new HashSet<>(); + for (Concept t : roles(from)) { + if (t.isAbstract() + && resolvedAbstractPredicates != null + && resolvedAbstractPredicates.containsKey(t)) { + distance += assertedDistance(resolvedAbstractPredicates.get(t), t); + acceptedRoles.add(resolvedAbstractPredicates.get(t)); + } else { + boolean ok = hasRole(to, t); + if (!ok) { + return -50; + } + } + } + + for (Concept t : roles(to)) { + if (!acceptedRoles.contains(t) && !hasRole(from, t)) { + return -50; + } + } + + // if (context == null) { + // context = context(to); + // } + + int component; + + if (compareInherency) { + + // component = distance(context(from), context, true); + // + // if (component < 0) { + // double d = ((double) component / 10.0); + // return -1 * (int) (d > 10 ? d : 10); + // } + // distance += component; + + /* + * any EXPLICIT inherency must be the same in both. + */ + Concept ourExplicitInherent = directInherent(from); + Concept itsExplicitInherent = directInherent(to); + + if (ourExplicitInherent != null || itsExplicitInherent != null) { + if (ourExplicitInherent != null && itsExplicitInherent != null) { + component = distance(ourExplicitInherent, itsExplicitInherent, true); + + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? 
d : 10); + } + distance += component; + } else { + return -50; } - // TODO Auto-generated method stub - - } - - private void saveConfiguration() { - File config = BaseService.getFileInConfigurationDirectory(startupOptions, "reasoner.yaml"); - org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); - } + } - @Override - public Concept defineConcept(KimConceptStatement statement, Scope scope) { - return build(statement, this.owl.requireOntology(statement.getNamespace(), - OWL.DEFAULT_ONTOLOGY_PREFIX), null, scope); - } + /* + * inherency must be same (theirs is ours) unless our inherent type is abstract + */ + Concept ourInherent = inherent(from); + Concept itsInherent = inherent(to); - @Override - public boolean scopesAreReactive() { - return false; - } + if (ourInherent != null || itsInherent != null) { - @Override - public ServiceStatus status() { - var ret = super.status(); - if (ret instanceof ServiceStatusImpl serviceStatus) { - serviceStatus.setConsistent(this.consistent.get()); + if (ourInherent != null && ourInherent.isAbstract()) { + component = distance(ourInherent, itsInherent, false); + } else if (ourInherent == null && itsInherent != null && context != null) { + /* + * Situations like: does XXX resolve YYY of ZZZ when ZZZ is the context. + */ + component = distance(context, itsInherent, false); + } else { + component = distance(itsInherent, ourInherent, false); } - return ret; - } - @Override - public Concept resolveConcept(String definition) { - Concept ret = concepts.get(definition); - if (ret == null) { - KimConcept parsed = scope.getService(ResourcesService.class).resolveConcept(definition); - if (parsed != null) { - ret = declareConcept(parsed); - concepts.put(definition, ret); - } else { - // TODO add an error concept in case of errors or null - } + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } - return ret; + distance += component; + } } - @Override - public Observable resolveObservable(String definition) { - Observable ret = observables.get(definition); - if (ret == null) { - KimObservable parsed = scope.getService(ResourcesService.class).resolveObservable(definition); - if (parsed != null) { - ret = declareObservable(parsed); - observables.put(definition, ret); - } else { - // TODO add an error observable in case of errors or null - } - } - return ret; + component = distance(goal(from), goal(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } + distance += component; - private Observable errorObservable(String definition) { - // TODO Auto-generated method stub - return null; + component = distance(cooccurrent(from), cooccurrent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } + distance += component; - private Concept errorConcept(String definition) { - // TODO Auto-generated method stub - return null; + component = distance(causant(from), causant(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? 
d : 10); } + distance += component; - @Override - public Collection operands(Semantics target) { - List ret = new ArrayList<>(); - if (target.is(SemanticType.UNION) || target.is(SemanticType.INTERSECTION)) { - ret.addAll(this.owl.getOperands(target.asConcept())); - } else { - ret.add(target.asConcept()); - } - - return ret; + component = distance(caused(from), caused(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } + distance += component; - @Override - public Collection children(Semantics target) { - return this.owl.getChildren(target.asConcept()); - } - - public Map> emergentResolvables(Observation trigger, - ContextScope scope) { - - Map> ret = new HashMap<>(); - Collection emergents = this.emergence.get(trigger.getObservable().getSemantics()); - - // if (!(scope instanceof IRuntimeScope) || ((IRuntimeScope) - // scope).getActuator() == null) { - // return Collections.emptyMap(); - // } - // - // Mode mode = ((IRuntimeScope) scope).getActuator().getMode(); - // - // /* - // * Skip a search in the map if we can't trigger anything. - // */ - // if (!trigger.getObservable().is(Type.QUALITY) - // && !(trigger.getObservable().is(Type.RELATIONSHIP) && mode == - // Mode.INSTANTIATION)) { - // return Collections.emptyMap(); - // } - // - // Map> ret = new HashMap<>(); - // Collection emergents = - // this.emergence.get(trigger.getObservable().getType()); - // - // if (emergents != null) { - // - // for (Emergence emergent : emergents) { - // - // Collection match = - // emergent.matches(trigger.getObservable().getType(), - // (IRuntimeScope) scope); - // - // /* - // * if process or configuration, update and skip if the scope already contains - // * the emergent observation - // */ - // if (emergent.emergentObservable.is(Type.PROCESS) - // || emergent.emergentObservable.is(Type.CONFIGURATION)) { - // if (((IRuntimeScope) scope).getCatalog() - // .get(new ObservedConcept(emergent.emergentObservable)) != null) { - // /* - // * TODO update with the new observation(s)! API to be defined - // */ - // if (((IDirectObservation) trigger).getOriginatingPattern() != null) { - // ((IDirectObservation) trigger).getOriginatingPattern().update(trigger); - // return ret; - // } - // } - // } - // - // ret.put(emergent.emergentObservable, match); - // } - // } - return ret; + component = distance(adjacent(from), adjacent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } + distance += component; - @Override - public Collection parents(Semantics target) { - return this.owl.getParents(target.asConcept()); + component = distance(compresent(from), compresent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); } + distance += component; - @Override - public Collection allChildren(Semantics target) { - - Set ret = collectChildren(target, new HashSet()); - ret.add(target.asConcept()); - - return ret; + component = distance(relativeTo(from), relativeTo(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? 
d : 10); } + distance += component; - private Set collectChildren(Semantics target, Set hashSet) { + return distance; + } - for (Concept c : children(target)) { - if (!hashSet.contains(c)) collectChildren(c, hashSet); - hashSet.add(c); - } - return hashSet; - } + /** + * Get the distance between the core described observables after factoring out all operators and + * ensuring they are the same. If not the same, the concepts are incompatible and the distance is + * negative. + * + * @param to + * @return + */ + public int coreDistance( + Concept from, + Concept to, + Concept context, + boolean compareInherency, + Map resolvedAbstractPredicates) { - @Override - public Collection allParents(Semantics target) { - return allParentsInternal(target, new HashSet()); + if (from == to || from.equals(to)) { + return 0; } - private Collection allParentsInternal(Semantics target, Set seen) { + Pair> c1ops = splitOperators(from); + Pair> c2ops = splitOperators(to); - Set concepts = new HashSet<>(); - - if (seen.contains(target)) { - return concepts; - } - - seen.add(target.asConcept()); - - for (Concept c : parents(target)) { - concepts.add(c); - concepts.addAll(allParentsInternal(c, seen)); - } - - return concepts; + if (!c1ops.getSecond().equals(c2ops.getSecond())) { + return -50; } - @Override - public Collection closure(Semantics target) { - return this.owl.getSemanticClosure(target.asConcept()); + if (!c1ops.getSecond().isEmpty()) { + /* + * if operators were extracted, the distance must take into account traits and + * the like for the concepts they describe, so call the main method again, which + * will call this and perform the core check below. + */ + return semanticDistance( + c1ops.getFirst(), + c2ops.getFirst(), + context, + compareInherency, + resolvedAbstractPredicates); } - @Override - public boolean resolves(Semantics toResolve, Semantics other, Semantics context) { - - /* - TODO if these are observables, the observer also must be considered and matched by semantic distance. - - TODO the observable now just carries an Observer identity but NOT a contract about the type of - observer it - accepts. This is necessary for models to be able to declare their observers before they are actually - observed. - - In each observable, the actual Observer (if semantic) takes over the contract when matching. This, - with the - fact that the observer may be a mere Identity, makes for a pretty complicated comparison. - - The observer instance is an Identity - if that's also a DirectObservation use semantics, otherwise - match with - equals(). - - An incoming without observer will match one with, but not the other way around unless the observers - are - compatible. - */ - - return semanticDistance(toResolve, other, context) >= 0; - } + Concept core1 = coreObservable(c1ops.getFirst()); + Concept core2 = coreObservable(c2ops.getFirst()); + /* + * FIXME this must check: have operator ? (operator == operator && coreObs == + * coreObs) : coreObs == coreObs; + */ - @Override - public int semanticDistance(Semantics target, Semantics other) { - return semanticDistance(target.asConcept(), other.asConcept(), null, true, null); + if (core1 == null || core2 == null) { + return -100; } - @Override - public int semanticDistance(Semantics target, Semantics other, Semantics context) { - return semanticDistance(target.asConcept(), other.asConcept(), context == null ? 
null : - context.asConcept(), true, null); + if (!from.is(SemanticType.PREDICATE) && !core1.equals(core2)) { + /* + * in order to resolve an observation, the core observables must be equal; + * subsumption is not OK (lidar elevation does not resolve elevation as it + * creates different observations; same for different observation techniques - + * easy strategy to annotate techs that make measurements incompatible = use a + * subclass instead of a related trait). + * + * Predicates are unique in being able to resolve a more specific predicate. + */ + return -50; } /** - * The workhorse of semantic distance computation can also consider any predicates that were abstract in - * the lineage of the passed concept (i.e. the concept is the result of a query with the abstract - * predicates, which has been contextualized to incarnate them into the passed correspondence with - * concrete counterparts). In that case, and only in that case, the distance between a concrete candidate - * and one that contains its predicates in the abstract form can be positive, i.e. a concept with abstract - * predicates can resolve one with concrete subclasses as long as the lineage contains its resolution. - * - * @param to - * @param context - * @param compareInherency - * @param resolvedAbstractPredicates - * @return + * Previously returning the distance, which does not work unless the core observables are the + * same (differentiated by predicates only) - which for example makes identities under 'type of' + * be compatible no matter the identity. */ - public int semanticDistance(Concept from, Concept to, Concept context, boolean compareInherency, - Map resolvedAbstractPredicates) { - - int distance = 0; - - // String resolving = this.getDefinition(); - // String resolved = concept.getDefinition(); - // System.out.println("Does " + resolving + " resolve " + resolved + "?"); - - int mainDistance = coreDistance(from, to, context, compareInherency, resolvedAbstractPredicates); - distance += mainDistance * 50; - if (distance < 0) { - return distance; - } - - // should have all the same traits - additional traits are allowed only - // in contextual types - Set acceptedTraits = new HashSet<>(); + return core1.equals(core2) + ? assertedDistance(from, to) + : (assertedDistance(from, to) == 0 ? 0 : -1); + } + + private int distance(Concept from, Concept to, boolean acceptAbsent) { + + int ret = 0; + if (from == null && to != null) { + ret = acceptAbsent ? 50 : -50; + } else if (from != null && to == null) { + ret = -50; + } else if (from != null && to != null) { + ret = is(to, from) ? assertedDistance(to, from) : -100; + if (ret >= 0) { for (Concept t : traits(from)) { - if (t.isAbstract() && resolvedAbstractPredicates != null && resolvedAbstractPredicates.containsKey(t)) { - distance += assertedDistance(resolvedAbstractPredicates.get(t), t); - acceptedTraits.add(resolvedAbstractPredicates.get(t)); - } else { - boolean ok = hasTrait(to, t); - if (!ok) { - return -50; - } - } - } - - for (Concept t : traits(to)) { - if (!acceptedTraits.contains(t) && !hasTrait(from, t)) { - return -50; - } - } - - // same with roles. 
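// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the patch): the symmetric predicate check applied to
// traits above and to roles below, reduced to plain sets. Any predicate present on one side but
// missing on the other makes the distance negative, which callers read as "incompatible". The
// real code additionally accepts abstract predicates that were concretized during resolution and
// uses subsumption (is()) rather than plain equality.
class PredicateCompatibilitySketch {
  /** Returns 0 when the two predicate sets agree, -50 otherwise. */
  static int predicateDistance(java.util.Set<String> from, java.util.Set<String> to) {
    return to.containsAll(from) && from.containsAll(to) ? 0 : -50;
  }

  public static void main(String[] args) {
    System.out.println(predicateDistance(java.util.Set.of("im:Big"), java.util.Set.of("im:Big")));   // 0
    System.out.println(predicateDistance(java.util.Set.of("im:Big"), java.util.Set.of("im:Small"))); // -50
  }
}
// ---------------------------------------------------------------------------------------------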
- Set acceptedRoles = new HashSet<>(); - for (Concept t : roles(from)) { - if (t.isAbstract() && resolvedAbstractPredicates != null && resolvedAbstractPredicates.containsKey(t)) { - distance += assertedDistance(resolvedAbstractPredicates.get(t), t); - acceptedRoles.add(resolvedAbstractPredicates.get(t)); - } else { - boolean ok = hasRole(to, t); - if (!ok) { - return -50; - } - } - } - - for (Concept t : roles(to)) { - if (!acceptedRoles.contains(t) && !hasRole(from, t)) { - return -50; - } - } - - // if (context == null) { - // context = context(to); - // } - - int component; - - if (compareInherency) { - - // component = distance(context(from), context, true); - // - // if (component < 0) { - // double d = ((double) component / 10.0); - // return -1 * (int) (d > 10 ? d : 10); - // } - // distance += component; - - /* - * any EXPLICIT inherency must be the same in both. - */ - Concept ourExplicitInherent = directInherent(from); - Concept itsExplicitInherent = directInherent(to); - - if (ourExplicitInherent != null || itsExplicitInherent != null) { - if (ourExplicitInherent != null && itsExplicitInherent != null) { - component = distance(ourExplicitInherent, itsExplicitInherent, true); - - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - } else { - return -50; - } - } - - /* - * inherency must be same (theirs is ours) unless our inherent type is abstract - */ - Concept ourInherent = inherent(from); - Concept itsInherent = inherent(to); - - if (ourInherent != null || itsInherent != null) { - - if (ourInherent != null && ourInherent.isAbstract()) { - component = distance(ourInherent, itsInherent, false); - } else if (ourInherent == null && itsInherent != null && context != null) { - /* - * Situations like: does XXX resolve YYY of ZZZ when ZZZ is the context. - */ - component = distance(context, itsInherent, false); - } else { - component = distance(itsInherent, ourInherent, false); - } - - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - } - - } - - component = distance(goal(from), goal(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(cooccurrent(from), cooccurrent(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(causant(from), causant(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(caused(from), caused(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(adjacent(from), adjacent(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(compresent(from), compresent(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? d : 10); - } - distance += component; - - component = distance(relativeTo(from), relativeTo(to), false); - if (component < 0) { - double d = ((double) component / 10.0); - return -1 * (int) (d > 10 ? 
d : 10); - } - distance += component; - - return distance; - } - - /** - * Get the distance between the core described observables after factoring out all operators and ensuring - * they are the same. If not the same, the concepts are incompatible and the distance is negative. - * - * @param to - * @return - */ - public int coreDistance(Concept from, Concept to, Concept context, boolean compareInherency, - Map resolvedAbstractPredicates) { - - if (from == to || from.equals(to)) { - return 0; - } - - Pair> c1ops = splitOperators(from); - Pair> c2ops = splitOperators(to); - - if (!c1ops.getSecond().equals(c2ops.getSecond())) { - return -50; - } - - if (!c1ops.getSecond().isEmpty()) { - /* - * if operators were extracted, the distance must take into account traits and - * the like for the concepts they describe, so call the main method again, which - * will call this and perform the core check below. - */ - return semanticDistance(c1ops.getFirst(), c2ops.getFirst(), context, compareInherency, - resolvedAbstractPredicates); - } - - Concept core1 = coreObservable(c1ops.getFirst()); - Concept core2 = coreObservable(c2ops.getFirst()); - - /* - * FIXME this must check: have operator ? (operator == operator && coreObs == - * coreObs) : coreObs == coreObs; - */ - - if (core1 == null || core2 == null) { - return -100; - } - - if (!from.is(SemanticType.PREDICATE) && !core1.equals(core2)) { - /* - * in order to resolve an observation, the core observables must be equal; - * subsumption is not OK (lidar elevation does not resolve elevation as it - * creates different observations; same for different observation techniques - - * easy strategy to annotate techs that make measurements incompatible = use a - * subclass instead of a related trait). - * - * Predicates are unique in being able to resolve a more specific predicate. - */ + boolean ok = hasTrait(to, t); + if (!ok) { return -50; + } } - - /** - * Previously returning the distance, which does not work unless the core - * observables are the same (differentiated by predicates only) - which for - * example makes identities under 'type of' be compatible no matter the - * identity. - */ - return core1.equals(core2) ? assertedDistance(from, to) : (assertedDistance(from, to) == 0 ? 0 : -1); - } - - private int distance(Concept from, Concept to, boolean acceptAbsent) { - - int ret = 0; - if (from == null && to != null) { - ret = acceptAbsent ? 50 : -50; - } else if (from != null && to == null) { - ret = -50; - } else if (from != null && to != null) { - ret = is(to, from) ? assertedDistance(to, from) : -100; - if (ret >= 0) { - for (Concept t : traits(from)) { - boolean ok = hasTrait(to, t); - if (!ok) { - return -50; - } - } - for (Concept t : traits(to)) { - if (!hasTrait(from, t)) { - ret += 10; - } - } - } - } - - return ret > 100 ? 
100 : ret; - } - - @Override - public Concept coreObservable(Semantics first) { - String def = first.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); - Concept ret = first.asConcept(); - while (def != null) { - ret = resolveConcept(def); - if (ret.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY) != null && !ret.getUrn().equals(def)) { - def = ret.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); - } else { - break; - } - } - return ret; - } - - @Override - public Pair> splitOperators(Semantics concept) { - - Concept cret = concept.asConcept(); - List types = new ArrayList<>(); - Set type = Sets.intersection(cret.getType(), SemanticType.OPERATOR_TYPES); - - while (type.size() > 0) { - types.add(type.iterator().next()); - Concept ccret = describedType(cret); - if (ccret == null) { - break; - } else { - cret = ccret; - } - type = Sets.intersection(cret.getType(), SemanticType.OPERATOR_TYPES); - } - - return Pair.of(cret, types); - } - - @Override - public Concept describedType(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.DESCRIBES_OBSERVABLE_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Collection traits(Semantics concept) { - Set ret = new HashSet<>(); - ret.addAll(this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_REALM_PROPERTY))); - ret.addAll(this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY))); - ret.addAll(this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY))); + for (Concept t : traits(to)) { + if (!hasTrait(from, t)) { + ret += 10; + } + } + } + } + + return ret > 100 ? 100 : ret; + } + + @Override + public Concept coreObservable(Semantics first) { + String def = first.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); + Concept ret = first.asConcept(); + while (def != null) { + ret = resolveConcept(def); + if (ret.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY) != null && !ret.getUrn().equals(def)) { + def = ret.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); + } else { + break; + } + } + return ret; + } + + @Override + public Pair> splitOperators(Semantics concept) { + + Concept cret = concept.asConcept(); + List types = new ArrayList<>(); + Set type = Sets.intersection(cret.getType(), SemanticType.OPERATOR_TYPES); + + while (type.size() > 0) { + types.add(type.iterator().next()); + Concept ccret = describedType(cret); + if (ccret == null) { + break; + } else { + cret = ccret; + } + type = Sets.intersection(cret.getType(), SemanticType.OPERATOR_TYPES); + } + + return Pair.of(cret, types); + } + + @Override + public Concept describedType(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.DESCRIBES_OBSERVABLE_PROPERTY)); + return cls.isEmpty() ? 
null : cls.iterator().next(); + } + + @Override + public Collection traits(Semantics concept) { + Set ret = new HashSet<>(); + ret.addAll( + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_REALM_PROPERTY))); + ret.addAll( + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY))); + ret.addAll( + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY))); + return ret; + } + + @Override + public int assertedDistance(Semantics from, Semantics to) { + + if (from == to || from.equals(to)) { + return 0; + } + int ret = 1; + while (true) { + Collection parents = parents(from); + if (parents.isEmpty()) { + break; + } + if (parents.contains(to)) { return ret; + } + for (Concept parent : parents) { + int d = assertedDistance(from, parent); + if (d >= 0) { + return ret + d; + } + } + ret++; + } + return -1; + } + + @Override + public boolean hasTrait(Semantics concept, Concept trait) { + for (Concept c : traits(concept)) { + if (is(c, trait)) { + return true; + } } + return false; + } - @Override - public int assertedDistance(Semantics from, Semantics to) { - - if (from == to || from.equals(to)) { - return 0; - } - int ret = 1; - while (true) { - Collection parents = parents(from); - if (parents.isEmpty()) { - break; - } - if (parents.contains(to)) { - return ret; - } - for (Concept parent : parents) { - int d = assertedDistance(from, parent); - if (d >= 0) { - return ret + d; - } - } - ret++; - } - return -1; - } - - @Override - public boolean hasTrait(Semantics concept, Concept trait) { - for (Concept c : traits(concept)) { - if (is(c, trait)) { - return true; - } - } - return false; - } - - @Override - public Collection roles(Semantics concept) { - return this.owl.getRestrictedClasses(concept.asConcept(), this.owl.getProperty(NS.HAS_ROLE_PROPERTY)); - } - - @Override - public boolean hasRole(Semantics concept, Concept role) { - for (Concept c : roles(concept)) { - if (is(c, role)) { - return true; - } - } - return false; - } - - // @Override - // public Concept directContext(Semantics concept) { - // Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - // this.owl.getProperty(NS.HAS_CONTEXT_PROPERTY)); - // return cls.isEmpty() ? null : cls.iterator().next(); - // } - // - // @Override - // public Concept context(Semantics concept) { - // Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - // this.owl.getProperty(NS.HAS_CONTEXT_PROPERTY)); - // return cls.isEmpty() ? null : cls.iterator().next(); - // } - - @Override - public Concept directInherent(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_INHERENT_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept inherent(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_INHERENT_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directGoal(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_PURPOSE_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept goal(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_PURPOSE_PROPERTY)); - return cls.isEmpty() ? 
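assertedDistance() above counts hops up the asserted parent hierarchy and returns -1 when "to" is not an ancestor of "from". The following is a simplified, stand-alone breadth-first rendering of that idea, not a transcription of the recursive method above; the PARENTS map and the concept names are invented for the example:

import java.util.*;

public final class AssertedDistanceSketch {

  // Invented multi-parent hierarchy: concept name -> asserted parents.
  static final Map<String, List<String>> PARENTS =
      Map.of(
          "LidarElevation", List.of("Elevation"),
          "Elevation", List.of("Length"),
          "Length", List.of());

  // Breadth-first hop count from "from" up to "to", or -1 if "to" is not an ancestor.
  static int assertedDistance(String from, String to) {
    if (from.equals(to)) {
      return 0;
    }
    Queue<String> frontier = new ArrayDeque<>(List.of(from));
    Set<String> seen = new HashSet<>(frontier);
    int hops = 0;
    while (!frontier.isEmpty()) {
      hops++;
      Queue<String> next = new ArrayDeque<>();
      for (String concept : frontier) {
        for (String parent : PARENTS.getOrDefault(concept, List.of())) {
          if (parent.equals(to)) {
            return hops;
          }
          if (seen.add(parent)) {
            next.add(parent);
          }
        }
      }
      frontier = next;
    }
    return -1;
  }

  public static void main(String[] args) {
    System.out.println(assertedDistance("LidarElevation", "Length")); // 2
    System.out.println(assertedDistance("Length", "Elevation"));      // -1
  }
}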
null : cls.iterator().next(); - } - - @Override - public Concept directCooccurrent(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.OCCURS_DURING_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directCausant(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_CAUSANT_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directCaused(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_CAUSED_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directAdjacent(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_ADJACENT_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directCompresent(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_COMPRESENT_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept directRelativeTo(Semantics concept) { - Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_COMPARED_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept cooccurrent(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.OCCURS_DURING_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept causant(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_CAUSANT_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept caused(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_CAUSED_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept adjacent(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_ADJACENT_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept compresent(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_COMPRESENT_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public Concept relativeTo(Semantics concept) { - Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.IS_COMPARED_TO_PROPERTY)); - return cls.isEmpty() ? null : cls.iterator().next(); - } - - @Override - public String displayLabel(Semantics concept) { - String ret = displayName(concept); - if (!ret.contains(" ")) { - ret = StringUtils.capitalize(CamelCase.toLowerCase(ret, ' ')); - } - return ret; - } - - @Override - public String displayName(Semantics semantics) { - return semantics instanceof Observable ? 
observableDisplayName((Observable) semantics) : - conceptDisplayName(semantics.asConcept()); - } - - private String conceptDisplayName(Concept t) { - - String ret = t.getMetadata().get(NS.DISPLAY_LABEL_PROPERTY, String.class); - - if (ret == null) { - ret = t.getMetadata().get(Metadata.DC_LABEL, String.class); - } - if (ret == null) { - ret = t.getName(); - } - if (ret.startsWith("i")) { - ret = ret.substring(1); - } - return ret; - } - - private String observableDisplayName(Observable o) { - - StringBuilder ret = new StringBuilder(conceptDisplayName(o.asConcept())); - - for (Pair operator : o.getValueOperators()) { - - ret.append(StringUtils.capitalize(operator.getFirst().declaration.replace(' ', '_'))); - - if (operator.getSecond() instanceof KimConcept concept) { - ret.append(conceptDisplayName(declareConcept(concept))); - } else if (operator.getSecond() instanceof KimObservable observable) { - ret.append(observableDisplayName(declareObservable(observable))); - } else { - ret.append("_").append(operator.getSecond().toString().replace(' ', '_')); - } - } - return ret.toString(); - } - - @Override - public String style(Concept concept) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Capabilities capabilities(Scope scope) { - - var ret = new ReasonerCapabilitiesImpl(); - - ret.setWorldviewId(worldview == null ? null : worldview.getWorldviewId()); - ret.setLocalName(localName); - ret.setType(Type.REASONER); - ret.setUrl(getUrl()); - ret.setServerId(hardwareSignature == null ? null : ("REASONER_" + hardwareSignature)); - ret.setServiceId(configuration.getServiceId()); - ret.setServiceName("Reasoner"); - ret.setBrokerURI((embeddedBroker != null && embeddedBroker.isOnline()) ? embeddedBroker.getURI() : - configuration.getBrokerURI()); - ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); - ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); - - ret.setAvailableMessagingQueues(Utils.URLs.isLocalHost(getUrl()) ? - EnumSet.of(Message.Queue.Info, Message.Queue.Errors, - Message.Queue.Warnings, Message.Queue.Events) : - EnumSet.noneOf(Message.Queue.class)); - return ret; - } - - @Override - public String serviceId() { - return configuration.getServiceId(); - } - - @Override - public Collection identities(Semantics concept) { - return this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY)); - } - - @Override - public Collection attributes(Semantics concept) { - return this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY)); - } - - @Override - public Collection realms(Semantics concept) { - return this.owl.getRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_REALM_PROPERTY)); - } - - @Override - public Concept baseParentTrait(Semantics trait) { - - String orig = trait.getMetadata().get(CoreOntology.NS.ORIGINAL_TRAIT, String.class); - if (orig != null) { - trait = this.owl.getConcept(orig); - } - - /* - * there should only be one of these or none. 
- */ - if (trait.getMetadata().get(NS.BASE_DECLARATION) != null) { - return (Concept) trait; - } - - for (Concept c : parents(trait)) { - Concept r = baseParentTrait(c); - if (r != null) { - return r; - } - } - - return null; - } - - @Override - public boolean hasDirectTrait(Semantics type, Concept trait) { - - for (Concept c : directTraits(type)) { - if (is(trait, c)) { - return true; - } - } - - return false; - } - - @Override - public boolean hasDirectRole(Semantics type, Concept trait) { - for (Concept c : directRoles(type)) { - if (is(trait, c)) { - return true; - } - } - return false; - } - - @Override - public Collection directTraits(Semantics concept) { - Set ret = new HashSet<>(); - ret.addAll(this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_REALM_PROPERTY))); - ret.addAll(this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY))); - ret.addAll(this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY))); - return ret; - } - - @Override - public Collection directAttributes(Semantics concept) { - return this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY)); - } - - @Override - public Collection directIdentities(Semantics concept) { - return this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY)); - } - - @Override - public Collection directRealms(Semantics concept) { - return this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_REALM_PROPERTY)); - } - - @Override - public Concept negated(Concept concept) { - return this.owl.makeNegation(concept.asConcept(), this.owl.getOntology(concept.getNamespace())); - } - - @Override - public SemanticType observableType(Semantics observable, boolean acceptTraits) { - if (observable instanceof Observable && ((Observable) observable).getArtifactType().equals(Artifact.Type.VOID)) { - return SemanticType.NOTHING; - } - Set type = EnumSet.copyOf(observable.asConcept().getType()); - type.retainAll(SemanticType.BASE_MODELABLE_TYPES); - if (type.size() != 1) { - throw new IllegalArgumentException("trying to extract the observable type from non-observable " + observable); - } - return type.iterator().next(); - } - - @Override - public Concept relationshipSource(Semantics relationship) { - Collection ret = relationshipSources(relationship); - return ret.size() == 0 ? null : ret.iterator().next(); - } - - @Override - public Collection relationshipSources(Semantics relationship) { - return org.integratedmodelling.common.utils.Utils.Collections.join(this.owl.getDirectRestrictedClasses(relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_SOURCE_PROPERTY)), this.owl.getRestrictedClasses(relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_SOURCE_PROPERTY))); - } - - @Override - public Concept relationshipTarget(Semantics relationship) { - Collection ret = relationshipTargets(relationship); - return ret.size() == 0 ? 
null : ret.iterator().next(); - } - - @Override - public Collection relationshipTargets(Semantics relationship) { - return org.integratedmodelling.common.utils.Utils.Collections.join(this.owl.getDirectRestrictedClasses(relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_DESTINATION_PROPERTY)), this.owl.getRestrictedClasses(relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_DESTINATION_PROPERTY))); - } - - @Override - public boolean satisfiable(Semantics ret) { - return this.owl.isSatisfiable(ret); - } - - @Override - public Collection applicableObservables(Concept main) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Collection directRoles(Semantics concept) { - return this.owl.getDirectRestrictedClasses(concept.asConcept(), - this.owl.getProperty(NS.HAS_ROLE_PROPERTY)); - } - - @Override - public ResourceSet loadKnowledge(Worldview worldview, Scope scope) { - - List ret = new ArrayList<>(); - - scope = getScopeManager().collectMessagePayload(scope, Notification.class, ret); - - if (worldview.isEmpty()) { - return ResourceSet.empty(); - } - - this.worldview = worldview; - - this.owl.initialize(worldview.getOntologies().getFirst()); - for (KimOntology ontology : worldview.getOntologies()) { - for (var statement : ontology.getStatements()) { - defineConcept(statement, scope); - } - this.owl.registerWithReasoner(ontology); - } - this.owl.flushReasoner(); - for (var strategyDocument : worldview.getObservationStrategies()) { - for (var strategy : strategyDocument.getStatements()) { - observationReasoner.registerStrategy(strategy); - } - } - observationReasoner.initializeStrategies(); - - // assess consistent status - this.consistent.set(Utils.Notifications.hasErrors(ret)); - - return Utils.Resources.createFromLexicalNotifications(ret); - } - - @Override - public synchronized ResourceSet updateKnowledge(ResourceSet changes, UserScope scope) { - - var ownResources = scope.getService(ResourcesService.class); - Map services = new HashMap<>(); - - serviceScope().setMaintenanceMode(true); - - // delete caches - this.concepts.clear(); - this.observables.clear(); - - boolean inconsistent = false; - - try { - /* - release all ontologies first. This should not be necessary but it prevents a NPE in case there are - forward references - which the syntax should flag as errors, but doesn't at the moment. 
- */ - for (var resource : changes.getOntologies()) { - var ontology = this.owl.getOntology(resource.getResourceUrn()); - if (ontology != null) { - this.owl.releaseOntology(ontology); - } - } - - for (var resource : changes.getOntologies()) { - - var resourceService = ownResources; - if (!resourceService.capabilities(scope).getServiceId().equals(resource.getServiceId())) { - resourceService = - services.computeIfAbsent(changes.getServices().get(resource.getServiceId()), - url -> new ResourcesClient(url, scope.getIdentity(), this, - settingsForSlaveServices)); - } - - var notifications = new ArrayList(); - var parsingScope = getScopeManager().collectMessagePayload(scope, Notification.class, - notifications); - var ontology = resourceService.resolveOntology(resource.getResourceUrn(), parsingScope); - for (var statement : ontology.getStatements()) { - defineConcept(statement, parsingScope); - } - this.owl.registerWithReasoner(ontology); - resource.getNotifications().addAll(notifications); - - if (Utils.Notifications.hasErrors(notifications)) { - inconsistent = true; - } - } - - for (var resource : changes.getObservationStrategies()) { - - var resourceService = ownResources; - if (!resourceService.capabilities(scope).getServiceId().equals(resource.getServiceId())) { - resourceService = - services.computeIfAbsent(changes.getServices().get(resource.getServiceId()), - url -> new ResourcesClient(url, scope.getIdentity(), this, - settingsForSlaveServices)); - } - - var notifications = new ArrayList(); - var parsingScope = getScopeManager().collectMessagePayload(scope, Notification.class, - notifications); - var observationStrategyDocument = - resourceService.resolveObservationStrategyDocument(resource.getResourceUrn(), - parsingScope); - - observationReasoner.releaseNamespace(observationStrategyDocument.getUrn()); - for (var strategy : observationStrategyDocument.getStatements()) { - observationReasoner.registerStrategy(strategy); - } - observationReasoner.initializeStrategies(); - - resource.getNotifications().addAll(notifications); - } - } catch (Throwable t) { - inconsistent = true; - scope.send(Notification.error(t)); - } finally { - serviceScope().setMaintenanceMode(false); - } - - if (inconsistent) { - this.consistent.set(false); - } - - return changes; - } - - public void setLocalName(String localName) { - this.localName = localName; - } - - @Override - public boolean is(Semantics concept, Semantics other) { - - if (concept == other || concept.equals(other)) { - return true; - } - - if (concept.asConcept().isCollective() != other.asConcept().isCollective()) { - return false; - } - - /* - * first use "isn't" based on the enum types to quickly cut out those that don't - * match. Also works with concepts in different ontologies that have the same - * definition. - */ - if (inWorldview(concept, other)) { - var fundamentalType = SemanticType.fundamentalType(other.asConcept().getType()); - if (fundamentalType != null && !Sets.intersection(concept.asConcept().getType(), - other.asConcept().getType()).contains(fundamentalType)) { - return false; - } - } - - /* - * TODO this would be a good point to insert caching logics. It should also go - * in all remote clients. - */ - - /* - * Speed up checking for logical expressions without forcing the reasoner to - * compute complex logics. 
- */ - if (concept.is(SemanticType.UNION)) { - - for (Concept c : operands(concept)) { - if (is(c, other)) { - return true; - } - } - - } else if (concept.is(SemanticType.INTERSECTION)) { - - for (Concept c : operands(concept)) { - if (!is(c, other)) { - return false; - } - } - return true; - - } else { - /* - * use the semantic closure. We may want to cache this eventually. - */ - Collection collection = allParents(concept); - collection.add(concept.asConcept()); - return collection.contains(other); - } - return false; - } - - @Override - public Semantics domain(Semantics conceptImpl) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Concept declareConcept(KimConcept conceptDeclaration) { - return declare(conceptDeclaration, this.owl.requireOntology(conceptDeclaration.getNamespace()), - scope); - } - - @Override - public Observable declareObservable(KimObservable observableDeclaration) { - return declare(observableDeclaration, - this.owl.requireOntology(observableDeclaration.getSemantics().getNamespace()), scope); - } - - @Override - public Observable declareObservable(KimObservable observableDeclaration, - Map patternVariables) { - - if (observableDeclaration.getPattern() == null) { - return declareObservable(observableDeclaration); - } - String urn = observableDeclaration.getPattern(); - for (var key : observableDeclaration.getPatternVariables()) { - var value = patternVariables.get(key); - if (value == null) { - return null; - } - String valueCode = switch (value) { - case KimConcept kimConcept -> /*"(" + */kimConcept.getUrn()/* + ")"*/; - case KimObservable kimConcept -> /*"(" + */kimConcept.getUrn() /*+ ")"*/; - case Concept kimConcept -> /*"(" + */kimConcept.getUrn()/* + ")"*/; - case Observable kimConcept -> /*"(" + */kimConcept.getUrn()/* + ")"*/; - case String string -> "\"" + Utils.Escape.forDoubleQuotedString(string, false) + "\""; - default -> value.toString(); - }; - urn = urn.replace("$:" + key, valueCode); - } - return resolveObservable(urn); - } - - - @Override - public Concept declareConcept(KimConcept observableDeclaration, - Map patternVariables) { - - if (!observableDeclaration.isPattern()) { - return declareConcept(observableDeclaration); - } - String urn = observableDeclaration.getUrn(); - for (var key : observableDeclaration.getPatternVariables()) { - var value = patternVariables.get(key); - if (value == null) { - return null; - } - String valueCode = switch (value) { - case KimConcept kimConcept -> "(" + kimConcept.getUrn() + ")"; - case KimObservable kimConcept -> "(" + kimConcept.getUrn() + ")"; - case Concept kimConcept -> "(" + kimConcept.getUrn() + ")"; - case Observable kimConcept -> "(" + kimConcept.getUrn() + ")"; - case String string -> "\"" + Utils.Escape.forDoubleQuotedString(string, false) + "\""; - default -> value.toString(); - }; - urn = urn.replace("$:" + key, valueCode); - } - return resolveConcept(urn); - } - - @Override - public boolean compatible(Semantics o1, Semantics o2) { - return compatible(o1, o2, 0); - } - - // @Override - public boolean compatible(Semantics o1, Semantics o2, int flags) { - - if (o1 == o2 || o1.equals(o2)) { - return true; - } - - boolean mustBeSameCoreType = (flags & REQUIRE_SAME_CORE_TYPE) != 0; - boolean useRoleParentClosure = (flags & USE_ROLE_PARENT_CLOSURE) != 0; - // boolean acceptRealmDifferences = (flags & ACCEPT_REALM_DIFFERENCES) != 0; - - // TODO unsupported - boolean useTraitParentClosure = (flags & USE_TRAIT_PARENT_CLOSURE) != 0; - - /** - * The check of fundamental types is 
only performed when both concepts are inside the worldview. - */ - if (inWorldview(o1, o2)) { - if ((!o1.is(SemanticType.OBSERVABLE) || !o2.is(SemanticType.OBSERVABLE)) && !(o1.is(SemanticType.CONFIGURATION) && o2.is(SemanticType.CONFIGURATION))) { - return false; - } - } - - /** - * first compatibility check is a simple subsumption if o1 is abstract, or a full core observability - * check if not. - */ - if (o2.isAbstract()) { - - if (is(o2, o1)) { - return false; - } - - } else { - - Concept core1 = coreObservable(o1); - Concept core2 = coreObservable(o2); - - if (core1 == null || core2 == null || !(mustBeSameCoreType ? core1.equals(core2) : is(core1, - core2))) { - return false; - } - } - - Concept ic1 = inherent(o1); - Concept ic2 = inherent(o2); - - // same with inherency - if (ic1 == null && ic2 != null) { - return false; - } - if (ic1 != null && ic2 != null) { - if (!compatible(ic1, ic2)) { - return false; - } - } - - for (Concept t : traits(o2)) { - boolean ok = hasTrait(o1, t); - if (!ok && useTraitParentClosure) { - ok = hasDirectTrait(o1, t); - } - if (!ok) { - return false; - } - } - - for (Concept t : roles(o2)) { - boolean ok = hasRole(o1, t); - if (!ok && useRoleParentClosure) { - ok = hasParentRole(o1, t); - } - if (!ok) { - return false; - } - } + @Override + public Collection roles(Semantics concept) { + return this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ROLE_PROPERTY)); + } + @Override + public boolean hasRole(Semantics concept, Concept role) { + for (Concept c : roles(concept)) { + if (is(c, role)) { return true; - } - - /** - * True if the concept comes from a loaded worldview. The alternative is that it comes from a core - * imported ontology, and possibly (in the future) from a conceptual extent ontology. - * - * @param semantics - * @return - */ - private boolean inWorldview(Semantics... 
semantics) { - for (Object o : semantics) { - if (switch (o) { - case ConceptImpl concept -> concept.getType().isEmpty(); - case KimConceptImpl concept -> concept.getType().isEmpty(); - case ObservableImpl observable -> observable.getSemantics().getType().isEmpty(); - case KimObservableImpl observable -> observable.getSemantics().getType().isEmpty(); - default -> false; - }) { - return false; - } - } - return true; - } - - @Override - public boolean hasParentRole(Semantics o1, Concept t) { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean contextuallyCompatible(Semantics focus, Semantics context1, Semantics context2) { - boolean ret = compatible(context1, context2, 0); - if (!ret && occurrent(context1)) { - ret = affectedBy(focus, context1); - Concept itsContext = inherent(context1); - if (!ret) { - if (itsContext != null) { - ret = compatible(itsContext, context2); - } - } - } - return ret; - } - - @Override - public boolean occurrent(Semantics context1) { - // TODO Auto-generated method stub - return false; - } - - @Override - public Collection affectedOrCreated(Semantics semantics) { - Set ret = new HashSet<>(); - for (Concept c : this.owl.getRestrictedClasses(semantics.asConcept(), - this.owl.getProperty(NS.AFFECTS_PROPERTY))) { - if (!this.owl.getOntology(c.getNamespace()).isInternal()) { - ret.add(c); - } - } - for (Concept c : this.owl.getRestrictedClasses(semantics.asConcept(), - this.owl.getProperty(NS.CREATES_PROPERTY))) { - if (!this.owl.getOntology(c.getNamespace()).isInternal()) { - ret.add(c); - } - } - return ret; - } - - @Override - public Collection affected(Semantics semantics) { - Set ret = new HashSet<>(); - for (Concept c : this.owl.getRestrictedClasses(semantics.asConcept(), - this.owl.getProperty(NS.AFFECTS_PROPERTY))) { - if (!this.owl.getOntology(c.getNamespace()).isInternal()) { - ret.add(c); - } - } - return ret; - } - - @Override - public Collection created(Semantics semantics) { - Set ret = new HashSet<>(); - for (Concept c : this.owl.getRestrictedClasses(semantics.asConcept(), - this.owl.getProperty(NS.CREATES_PROPERTY))) { - if (!this.owl.getOntology(c.getNamespace()).isInternal()) { - ret.add(c); - } - } - return ret; - } - - @Override - public boolean match(Semantics candidate, Semantics pattern) { - return syntacticMatcher.match(candidate, pattern); - } - - @Override - public boolean match(Semantics candidate, Semantics pattern, Map matches) { - return false; - } - - @Override - public T concretize(T pattern, Map concreteConcepts) { - return null; - } - - @Override - public T concretize(T pattern, List concreteConcepts) { - return null; - } - - @Override - public boolean affectedBy(Semantics affected, Semantics affecting) { - Concept described = describedType(affected); - for (Concept c : affected(affecting)) { - if (is(affected, c) || (described != null && is(described, c))) { - return true; - } - } - return false; - } - - @Override - public boolean createdBy(Semantics affected, Semantics affecting) { - Concept described = describedType(affected); - if (described != null && is(described, affecting)) { - return true; - } - for (Concept c : created(affecting)) { - if (is(affected, c) || (described != null && is(described, c))) { - return true; - } - } - return false; - } - - @Override - public Concept baseObservable(Semantics c) { - - if (c instanceof Concept concept) { - return concept; - } - - Collection traits = directTraits(c); - Collection roles = directRoles(c); - if (traits.size() == 0 && roles.size() == 0 && 
derived(c)) { - return c.asConcept(); - } - - return baseObservable(parent(c)); - } - - @Override - public Concept parent(Semantics c) { - Collection parents = this.owl.getParents(c.asConcept()); - return parents.isEmpty() ? null : parents.iterator().next(); - } - - @Override - public Concept compose(Collection concepts, LogicalConnector connector) { - - if (connector == LogicalConnector.EXCLUSION || connector == LogicalConnector.DISJOINT_UNION) { - throw new KlabIllegalArgumentException("Reasoner::compose: connector " + connector + " not " + - "supported"); - } - if (concepts.size() == 1) { - return concepts.iterator().next(); - } - if (concepts.size() > 1) { - return connector == LogicalConnector.UNION ? this.owl.getUnion(concepts, - this.owl.getOntology(concepts.iterator().next().getNamespace()), - concepts.iterator().next().getType()) : this.owl.getIntersection(concepts, - this.owl.getOntology(concepts.iterator().next().getNamespace()), - concepts.iterator().next().getType()); - } - return owl.getNothing(); - } - - @Override - public Concept rawObservable(Semantics observable) { - String def = observable.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); - Concept ret = observable.asConcept(); - if (def != null) { - ret = resolveConcept(def); - } - return ret; - } - - @Override - public Builder observableBuilder(Observable observableImpl) { - return ObservableBuilder.getBuilder(observableImpl, scope, this); - } - - /* - * --- non-API - */ - - /* - * Record correspondence of core concept peers to worldview concepts. Called by - * KimValidator for later use at namespace construction. - */ - public void setWorldviewPeer(String coreConcept, String worldviewConcept) { - coreConceptPeers.put(worldviewConcept, coreConcept); - } - - public Concept build(KimConceptStatement concept, Ontology ontology, KimConceptStatement kimObject, - Scope monitor) { - - try { - - if (concept.isAlias() || concept.getUpperConceptDefined() != null) { - - /* - * can only have 'is' or 'equals' X; for core concepts 'is' means 'equals', and we use the - * statement to establish the semantic type. 
- */ - Concept parent = null; - if (concept.getUpperConceptDefined() != null) { - parent = this.owl.getConcept(concept.getUpperConceptDefined()); - if (parent == null) { - monitor.error("Core concept " + concept.getUpperConceptDefined() + " is unknown", - concept); - } else { - parent.getType().addAll(concept.getType()); - } - } else if (concept.getDeclaredParent() != null) { - parent = declareConcept(concept.getDeclaredParent()); - } - - if (parent != null) { - ontology.addDelegateConcept(concept.getUrn(), ontology.getName(), parent); - } - - return null; - } - - Concept ret = buildInternal(concept, ontology, kimObject, monitor); - - if (ret != null) { - - Concept upperConceptDefined = null; - if (concept.getDeclaredParent() == null) { - Concept parent = null; - if (concept.getUpperConceptDefined() != null) { - upperConceptDefined = parent = this.owl.getConcept(concept.getUpperConceptDefined()); - if (parent == null) { - monitor.error("Core concept " + concept.getUpperConceptDefined() + " is " + - "unknown", concept); - } - } else { - parent = this.owl.getCoreOntology().getCoreType(concept.getType()); - if (coreConceptPeers.containsKey(ret.toString())) { - // ensure that any non-trivial core inheritance is dealt with - // appropriately - parent = this.owl.getCoreOntology().alignCoreInheritance(ret); - } - } - - if (parent != null) { - ontology.add(Axiom.SubClass(parent.getNamespace() + ":" + parent.getName(), - ret.getName())); - } - } - - createProperties(ret, ontology); - ontology.define(); - - if (coreConceptPeers.containsKey(ret.toString()) && upperConceptDefined != null - /* && "true".equals(upperConceptDefined.getMetadata().get(NS.IS_CORE_KIM_TYPE, - "false")*/) { - // TODO revise - use core ontology statements only - this.owl.getCoreOntology().setAsCoreType(ret); - } - - } + } + } + return false; + } + + // @Override + // public Concept directContext(Semantics concept) { + // Collection cls = this.owl.getDirectRestrictedClasses(concept.asConcept(), + // this.owl.getProperty(NS.HAS_CONTEXT_PROPERTY)); + // return cls.isEmpty() ? null : cls.iterator().next(); + // } + // + // @Override + // public Concept context(Semantics concept) { + // Collection cls = this.owl.getRestrictedClasses(concept.asConcept(), + // this.owl.getProperty(NS.HAS_CONTEXT_PROPERTY)); + // return cls.isEmpty() ? null : cls.iterator().next(); + // } + + @Override + public Concept directInherent(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_INHERENT_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept inherent(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_INHERENT_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directGoal(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_PURPOSE_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept goal(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_PURPOSE_PROPERTY)); + return cls.isEmpty() ? 
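The single-valued lookups above (directInherent and inherent, directGoal and goal, and those that follow) share two ingredients: the direct variants query getDirectRestrictedClasses while the plain variants query getRestrictedClasses (by the naming, restrictions asserted directly on the concept versus the full closure), and both reduce the result with the same "first element or null" idiom. Factored out, that idiom would look like the hypothetical helper below; firstOrNull does not exist in the source and is shown only to name the pattern:

import java.util.Collection;

final class Restrictions {

  // The reduction repeated by each single-valued getter above: first restricted class, or null.
  static <T> T firstOrNull(Collection<T> classes) {
    return classes.isEmpty() ? null : classes.iterator().next();
  }
}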
null : cls.iterator().next(); + } + + @Override + public Concept directCooccurrent(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.OCCURS_DURING_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directCausant(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_CAUSANT_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directCaused(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_CAUSED_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directAdjacent(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_ADJACENT_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directCompresent(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_COMPRESENT_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept directRelativeTo(Semantics concept) { + Collection cls = + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_COMPARED_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept cooccurrent(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.OCCURS_DURING_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept causant(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_CAUSANT_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept caused(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_CAUSED_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept adjacent(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_ADJACENT_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept compresent(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_COMPRESENT_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public Concept relativeTo(Semantics concept) { + Collection cls = + this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.IS_COMPARED_TO_PROPERTY)); + return cls.isEmpty() ? null : cls.iterator().next(); + } + + @Override + public String displayLabel(Semantics concept) { + String ret = displayName(concept); + if (!ret.contains(" ")) { + ret = StringUtils.capitalize(CamelCase.toLowerCase(ret, ' ')); + } + return ret; + } + + @Override + public String displayName(Semantics semantics) { + return semantics instanceof Observable + ? 
observableDisplayName((Observable) semantics) + : conceptDisplayName(semantics.asConcept()); + } + + private String conceptDisplayName(Concept t) { + + String ret = t.getMetadata().get(NS.DISPLAY_LABEL_PROPERTY, String.class); + + if (ret == null) { + ret = t.getMetadata().get(Metadata.DC_LABEL, String.class); + } + if (ret == null) { + ret = t.getName(); + } + if (ret.startsWith("i")) { + ret = ret.substring(1); + } + return ret; + } + + private String observableDisplayName(Observable o) { + + StringBuilder ret = new StringBuilder(conceptDisplayName(o.asConcept())); + + for (Pair operator : o.getValueOperators()) { + + ret.append(StringUtils.capitalize(operator.getFirst().declaration.replace(' ', '_'))); + + if (operator.getSecond() instanceof KimConcept concept) { + ret.append(conceptDisplayName(declareConcept(concept))); + } else if (operator.getSecond() instanceof KimObservable observable) { + ret.append(observableDisplayName(declareObservable(observable))); + } else { + ret.append("_").append(operator.getSecond().toString().replace(' ', '_')); + } + } + return ret.toString(); + } + + @Override + public String style(Concept concept) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Capabilities capabilities(Scope scope) { + + var ret = new ReasonerCapabilitiesImpl(); + + ret.setWorldviewId(worldview == null ? null : worldview.getWorldviewId()); + ret.setLocalName(localName); + ret.setType(Type.REASONER); + ret.setUrl(getUrl()); + ret.setServerId(hardwareSignature == null ? null : ("REASONER_" + hardwareSignature)); + ret.setServiceId(configuration.getServiceId()); + ret.setServiceName("Reasoner"); + ret.setBrokerURI( + (embeddedBroker != null && embeddedBroker.isOnline()) + ? embeddedBroker.getURI() + : configuration.getBrokerURI()); + ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); + ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); + + ret.setAvailableMessagingQueues( + Utils.URLs.isLocalHost(getUrl()) + ? EnumSet.of( + Message.Queue.Info, + Message.Queue.Errors, + Message.Queue.Warnings, + Message.Queue.Events) + : EnumSet.noneOf(Message.Queue.class)); + return ret; + } + + @Override + public String serviceId() { + return configuration.getServiceId(); + } + + @Override + public Collection identities(Semantics concept) { + return this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY)); + } + + @Override + public Collection attributes(Semantics concept) { + return this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY)); + } + + @Override + public Collection realms(Semantics concept) { + return this.owl.getRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_REALM_PROPERTY)); + } + + @Override + public Concept baseParentTrait(Semantics trait) { + + String orig = trait.getMetadata().get(CoreOntology.NS.ORIGINAL_TRAIT, String.class); + if (orig != null) { + trait = this.owl.getConcept(orig); + } - return ret; + /* + * there should only be one of these or none. 
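To illustrate what displayName() and displayLabel() above produce: conceptDisplayName() falls back from the display-label metadata to the Dublin Core label to the raw concept name and strips a leading "i", and displayLabel() then splits CamelCase names into capitalized words. A rough stand-alone approximation under those assumptions; the regex split stands in for the CamelCase utility used in the source, and the sample name is invented:

public final class DisplayLabelSketch {

  // Rough approximation of displayLabel(): strip a leading "i", then, when the name contains
  // no spaces, split CamelCase into lower-case words and capitalize the result.
  static String displayLabel(String conceptName) {
    String name = conceptName.startsWith("i") ? conceptName.substring(1) : conceptName;
    if (name.contains(" ")) {
      return name;
    }
    String spaced = name.replaceAll("(?<=[a-z])(?=[A-Z])", " ").toLowerCase();
    return Character.toUpperCase(spaced.charAt(0)) + spaced.substring(1);
  }

  public static void main(String[] args) {
    System.out.println(displayLabel("iNetPrimaryProduction")); // Net primary production
  }
}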
+ */ + if (trait.getMetadata().get(NS.BASE_DECLARATION) != null) { + return (Concept) trait; + } - } catch (Throwable e) { - monitor.error(e, concept); - } - return null; + for (Concept c : parents(trait)) { + Concept r = baseParentTrait(c); + if (r != null) { + return r; + } } - private Concept buildInternal(final KimConceptStatement concept, Ontology ontology, - KimConceptStatement kimObject, final Scope monitor) { + return null; + } - Concept main = null; - String mainId = concept.getUrn(); + @Override + public boolean hasDirectTrait(Semantics type, Concept trait) { - ontology.add(Axiom.ClassAssertion(mainId, - concept.getType().stream().map((c) -> SemanticType.valueOf(c.name())).collect(Collectors.toSet()))); + for (Concept c : directTraits(type)) { + if (is(trait, c)) { + return true; + } + } - // set the k.IM definition - ontology.add(Axiom.AnnotationAssertion(mainId, NS.CONCEPT_DEFINITION_PROPERTY, - ontology.getName() + ":" + concept.getUrn())); + return false; + } - // and the reference name - ontology.add(Axiom.AnnotationAssertion(mainId, NS.REFERENCE_NAME_PROPERTY, - OWL.getCleanFullId(ontology.getName(), concept.getUrn()))); + @Override + public boolean hasDirectRole(Semantics type, Concept trait) { + for (Concept c : directRoles(type)) { + if (is(trait, c)) { + return true; + } + } + return false; + } + + @Override + public Collection directTraits(Semantics concept) { + Set ret = new HashSet<>(); + ret.addAll( + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_REALM_PROPERTY))); + ret.addAll( + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY))); + ret.addAll( + this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY))); + return ret; + } + + @Override + public Collection directAttributes(Semantics concept) { + return this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ATTRIBUTE_PROPERTY)); + } + + @Override + public Collection directIdentities(Semantics concept) { + return this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_IDENTITY_PROPERTY)); + } + + @Override + public Collection directRealms(Semantics concept) { + return this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_REALM_PROPERTY)); + } + + @Override + public Concept negated(Concept concept) { + return this.owl.makeNegation(concept.asConcept(), this.owl.getOntology(concept.getNamespace())); + } + + @Override + public SemanticType observableType(Semantics observable, boolean acceptTraits) { + if (observable instanceof Observable + && ((Observable) observable).getArtifactType().equals(Artifact.Type.VOID)) { + return SemanticType.NOTHING; + } + Set type = EnumSet.copyOf(observable.asConcept().getType()); + type.retainAll(SemanticType.BASE_MODELABLE_TYPES); + if (type.size() != 1) { + throw new IllegalArgumentException( + "trying to extract the observable type from non-observable " + observable); + } + return type.iterator().next(); + } + + @Override + public Concept relationshipSource(Semantics relationship) { + Collection ret = relationshipSources(relationship); + return ret.size() == 0 ? 
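observableType() above intersects the concept's semantic types with SemanticType.BASE_MODELABLE_TYPES and insists that exactly one type survives, throwing otherwise. A stand-alone sketch of that check with an invented Kind enum in place of SemanticType; the member list is illustrative and not the real BASE_MODELABLE_TYPES:

import java.util.EnumSet;
import java.util.Set;

public final class ObservableTypeSketch {

  // Invented subset of semantic types; the real enum is SemanticType.
  enum Kind { QUALITY, SUBJECT, PROCESS, EVENT, RELATIONSHIP, ATTRIBUTE, ABSTRACT }

  static final Set<Kind> BASE_MODELABLE =
      EnumSet.of(Kind.QUALITY, Kind.SUBJECT, Kind.PROCESS, Kind.EVENT, Kind.RELATIONSHIP);

  // Same shape as observableType(): exactly one base modelable type must remain.
  static Kind observableType(Set<Kind> declared) {
    EnumSet<Kind> type = EnumSet.copyOf(declared);
    type.retainAll(BASE_MODELABLE);
    if (type.size() != 1) {
      throw new IllegalArgumentException("not an observable: " + declared);
    }
    return type.iterator().next();
  }

  public static void main(String[] args) {
    System.out.println(observableType(EnumSet.of(Kind.QUALITY, Kind.ABSTRACT))); // QUALITY
  }
}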
null : ret.iterator().next(); + } + + @Override + public Collection relationshipSources(Semantics relationship) { + return org.integratedmodelling.common.utils.Utils.Collections.join( + this.owl.getDirectRestrictedClasses( + relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_SOURCE_PROPERTY)), + this.owl.getRestrictedClasses( + relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_SOURCE_PROPERTY))); + } + + @Override + public Concept relationshipTarget(Semantics relationship) { + Collection ret = relationshipTargets(relationship); + return ret.size() == 0 ? null : ret.iterator().next(); + } + + @Override + public Collection relationshipTargets(Semantics relationship) { + return org.integratedmodelling.common.utils.Utils.Collections.join( + this.owl.getDirectRestrictedClasses( + relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_DESTINATION_PROPERTY)), + this.owl.getRestrictedClasses( + relationship.asConcept(), this.owl.getProperty(NS.IMPLIES_DESTINATION_PROPERTY))); + } + + @Override + public boolean satisfiable(Semantics ret) { + return this.owl.isSatisfiable(ret); + } + + @Override + public Collection applicableObservables(Concept main) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Collection directRoles(Semantics concept) { + return this.owl.getDirectRestrictedClasses( + concept.asConcept(), this.owl.getProperty(NS.HAS_ROLE_PROPERTY)); + } + + @Override + public ResourceSet loadKnowledge(Worldview worldview, Scope scope) { + + List ret = new ArrayList<>(); + + scope = getScopeManager().collectMessagePayload(scope, Notification.class, ret); + + if (worldview.isEmpty()) { + return ResourceSet.empty(); + } + + this.worldview = worldview; + + this.owl.initialize(worldview.getOntologies().getFirst()); + for (KimOntology ontology : worldview.getOntologies()) { + for (var statement : ontology.getStatements()) { + defineConcept(statement, scope); + } + this.owl.registerWithReasoner(ontology); + } + this.owl.flushReasoner(); + for (var strategyDocument : worldview.getObservationStrategies()) { + for (var strategy : strategyDocument.getStatements()) { + observationReasoner.registerStrategy(strategy); + } + } + observationReasoner.initializeStrategies(); + + // assess consistent status + this.consistent.set(Utils.Notifications.hasErrors(ret)); + + return Utils.Resources.createFromLexicalNotifications(ret); + } + + @Override + public synchronized ResourceSet updateKnowledge(ResourceSet changes, UserScope scope) { + + var ownResources = scope.getService(ResourcesService.class); + Map services = new HashMap<>(); + + serviceScope().setMaintenanceMode(true); + + // delete caches + this.concepts.clear(); + this.observables.clear(); + + boolean inconsistent = false; + + try { + /* + release all ontologies first. This should not be necessary but it prevents a NPE in case there are + forward references - which the syntax should flag as errors, but doesn't at the moment. 
+ */ + for (var resource : changes.getOntologies()) { + var ontology = this.owl.getOntology(resource.getResourceUrn()); + if (ontology != null) { + this.owl.releaseOntology(ontology); + } + } + + for (var resource : changes.getOntologies()) { + + var resourceService = ownResources; + if (!resourceService.capabilities(scope).getServiceId().equals(resource.getServiceId())) { + resourceService = + services.computeIfAbsent( + changes.getServices().get(resource.getServiceId()), + url -> + new ResourcesClient( + url, scope.getIdentity(), this, settingsForSlaveServices)); + } - if (concept.getType().contains(SemanticType.NOTHING)) { - monitor.error("Declaration is inconsistent or uses unknown concepts", concept); - return null; + var notifications = new ArrayList(); + var parsingScope = + getScopeManager().collectMessagePayload(scope, Notification.class, notifications); + var ontology = resourceService.resolveOntology(resource.getResourceUrn(), parsingScope); + for (var statement : ontology.getStatements()) { + defineConcept(statement, parsingScope); } + this.owl.registerWithReasoner(ontology); + resource.getNotifications().addAll(notifications); - if (concept.getDocstring() != null) { - ontology.add(Axiom.AnnotationAssertion(mainId, Vocabulary.RDFS_COMMENT, concept.getDocstring())); + if (Utils.Notifications.hasErrors(notifications)) { + inconsistent = true; } + } - if (kimObject == null) { - ontology.add(Axiom.AnnotationAssertion(mainId, NS.BASE_DECLARATION, "true")); + for (var resource : changes.getObservationStrategies()) { + + var resourceService = ownResources; + if (!resourceService.capabilities(scope).getServiceId().equals(resource.getServiceId())) { + resourceService = + services.computeIfAbsent( + changes.getServices().get(resource.getServiceId()), + url -> + new ResourcesClient( + url, scope.getIdentity(), this, settingsForSlaveServices)); } - /* - * basic attributes subjective deniable internal uni/bidirectional - * (relationship) - */ - if (concept.isAbstract() || concept.getNamespace().equals(CoreOntology.CORE_ONTOLOGY_NAME)) { - ontology.add(Axiom.AnnotationAssertion(mainId, CoreOntology.NS.IS_ABSTRACT, "true")); + var notifications = new ArrayList(); + var parsingScope = + getScopeManager().collectMessagePayload(scope, Notification.class, notifications); + var observationStrategyDocument = + resourceService.resolveObservationStrategyDocument( + resource.getResourceUrn(), parsingScope); + + observationReasoner.releaseNamespace(observationStrategyDocument.getUrn()); + for (var strategy : observationStrategyDocument.getStatements()) { + observationReasoner.registerStrategy(strategy); } + observationReasoner.initializeStrategies(); - ontology.define(); - main = ontology.getConcept(mainId); + resource.getNotifications().addAll(notifications); + } + } catch (Throwable t) { + inconsistent = true; + scope.send(Notification.error(t)); + } finally { + serviceScope().setMaintenanceMode(false); + } - indexer.index(concept); + if (inconsistent) { + this.consistent.set(false); + } - if (concept.getDeclaredParent() != null) { + return changes; + } - // List concepts = new ArrayList<>(); - // for (KimConcept pdecl : parent.getConcepts()) { - Concept declared = declare(concept.getDeclaredParent(), ontology, monitor); - if (declared == null) { - monitor.error("parent declaration " + concept.getDeclaredParent().getUrn() + " does not " + - "identify " + "known " + "concepts", concept.getDeclaredParent()); - return null; - } else { - ontology.add(Axiom.SubClass(declared.getNamespace() + ":" + 
declared.getName(), mainId)); - } - // concepts.add(declared); - // } - // - // if (concepts.size() == 1) { - // - // } - /* else { - Concept expr = null; - switch (parent.getConnector()) { - case INTERSECTION: - expr = this.owl.getIntersection(concepts, ontology, concepts.get(0).getType()); - break; - case UNION: - expr = this.owl.getUnion(concepts, ontology, concepts.get(0).getType()); - break; - case FOLLOWS: - expr = this.owl.getConsequentialityEvent(concepts, ontology); - break; - default: - // won't happen - break; - } - if (concept.isAlias()) { - ontology.addDelegateConcept(mainId, ontology.getName(), expr); - } else { - ontology.add(Axiom.SubClass(expr.getNamespace() + ":" + expr.getName(), mainId)); - } - }*/ - ontology.define(); - } + public void setLocalName(String localName) { + this.localName = localName; + } - for (var child : concept.getChildren()) { - try { - // KimConceptStatement chobj = kimObject == null ? null : new - // KimConceptStatement((IKimConceptStatement) child); - Concept childConcept = buildInternal((KimConceptStatement) child, ontology, concept, - /* - * monitor instanceof ErrorNotifyingMonitor ? ((ErrorNotifyingMonitor) - * monitor).contextualize(child) : - */ monitor); - if (childConcept != null) { - ontology.add(Axiom.SubClass(mainId, childConcept.getName())); - ontology.define(); - } - // kimObject.getChildren().add(chobj); - } catch (Throwable e) { - monitor.error(e, child); - } - } + @Override + public boolean is(Semantics concept, Semantics other) { - for (KimConcept inherited : concept.getTraitsInherited()) { - Concept trait = declare(inherited, ontology, monitor); - if (trait == null) { - monitor.error("inherited " + inherited.getName() + " does not identify " + - "known concepts", - inherited); - // return null; - } else { - this.owl.addTrait(main, trait, ontology); - } - } + if (concept == other || concept.equals(other)) { + return true; + } - // TODO all the rest: creates, .... - for (KimConcept affected : concept.getQualitiesAffected()) { - Concept quality = declare(affected, ontology, monitor); - if (quality == null) { - monitor.error("affected " + affected.getName() + " does not identify " + - "known concepts", - affected); - } else { - this.owl.restrictSome(main, this.owl.getProperty(CoreOntology.NS.AFFECTS_PROPERTY), quality - , ontology); - } - } + if (concept.asConcept().isCollective() != other.asConcept().isCollective()) { + return false; + } - for (KimConcept required : concept.getRequiredIdentities()) { - Concept quality = declare(required, ontology, monitor); - if (quality == null) { - monitor.error("required " + required.getName() + " does not identify " + - "known concepts", - required); - } else { - this.owl.restrictSome(main, this.owl.getProperty(NS.REQUIRES_IDENTITY_PROPERTY), quality, - ontology); - } - } + /* + * first use "isn't" based on the enum types to quickly cut out those that don't + * match. Also works with concepts in different ontologies that have the same + * definition. 
+ */ + if (inWorldview(concept, other)) { + var fundamentalType = SemanticType.fundamentalType(other.asConcept().getType()); + if (fundamentalType != null + && !Sets.intersection(concept.asConcept().getType(), other.asConcept().getType()) + .contains(fundamentalType)) { + return false; + } + } - for (KimConcept affected : concept.getObservablesCreated()) { - Concept quality = declare(affected, ontology, monitor); - if (quality == null) { - monitor.error("created " + affected.getName() + " does not identify known" + - " concepts", - affected); - } else { - this.owl.restrictSome(main, this.owl.getProperty(NS.CREATES_PROPERTY), quality, ontology); - } - } + /* + * TODO this would be a good point to insert caching logics. It should also go + * in all remote clients. + */ - for (ApplicableConcept link : concept.getSubjectsLinked()) { - if (link.getOriginalObservable() == null && link.getSource() != null) { - // relationship source->target - this.owl.defineRelationship(main, declare(link.getSource(), ontology, monitor), - declare(link.getTarget(), ontology, monitor), ontology); - } else { - // TODO - } - } + /* + * Speed up checking for logical expressions without forcing the reasoner to + * compute complex logics. + */ + if (concept.is(SemanticType.UNION)) { + + for (Concept c : operands(concept)) { + if (is(c, other)) { + return true; + } + } + + } else if (concept.is(SemanticType.INTERSECTION)) { + + for (Concept c : operands(concept)) { + if (!is(c, other)) { + return false; + } + } + return true; + + } else { + /* + * use the semantic closure. We may want to cache this eventually. + */ + Collection collection = allParents(concept); + collection.add(concept.asConcept()); + return collection.contains(other); + } + return false; + } + + @Override + public Semantics domain(Semantics conceptImpl) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Concept declareConcept(KimConcept conceptDeclaration) { + return declare( + conceptDeclaration, this.owl.requireOntology(conceptDeclaration.getNamespace()), scope); + } + + @Override + public Observable declareObservable(KimObservable observableDeclaration) { + return declare( + observableDeclaration, + this.owl.requireOntology(observableDeclaration.getSemantics().getNamespace()), + scope); + } + + @Override + public Observable declareObservable( + KimObservable observableDeclaration, Map patternVariables) { + + if (observableDeclaration.getPattern() == null) { + return declareObservable(observableDeclaration); + } + String urn = observableDeclaration.getPattern(); + for (var key : observableDeclaration.getPatternVariables()) { + var value = patternVariables.get(key); + if (value == null) { + return null; + } + String valueCode = + switch (value) { + case KimConcept kimConcept -> /*"(" + */ kimConcept.getUrn() /* + ")"*/; + case KimObservable kimConcept -> /*"(" + */ kimConcept.getUrn() /*+ ")"*/; + case Concept kimConcept -> /*"(" + */ kimConcept.getUrn() /* + ")"*/; + case Observable kimConcept -> /*"(" + */ kimConcept.getUrn() /* + ")"*/; + case String string -> "\"" + Utils.Escape.forDoubleQuotedString(string, false) + "\""; + default -> value.toString(); + }; + urn = urn.replace("$:" + key, valueCode); + } + return resolveObservable(urn); + } + + @Override + public Concept declareConcept( + KimConcept observableDeclaration, Map patternVariables) { + + if (!observableDeclaration.isPattern()) { + return declareConcept(observableDeclaration); + } + String urn = observableDeclaration.getUrn(); + for (var key : 
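The fast paths in is() above avoid invoking the reasoner for logical composites: a union matches "other" as soon as one operand does, an intersection only when every operand does, and anything else falls back to the asserted closure (allParents plus the concept itself). A stand-alone miniature of that dispatch, with invented Atom, Union and Intersection records in place of Concept and a plain ancestor set in place of allParents():

import java.util.List;
import java.util.Set;

public final class SubsumptionSketch {

  // Minimal stand-in for a concept: atomic with known ancestors, or a logical composite.
  sealed interface C permits Atom, Union, Intersection {}

  record Atom(String name, Set<String> ancestors) implements C {}

  record Union(List<C> operands) implements C {}

  record Intersection(List<C> operands) implements C {}

  // Same dispatch as is(): unions need one matching operand, intersections need all,
  // atoms fall back to the pre-computed ancestor closure.
  static boolean is(C concept, Atom other) {
    return switch (concept) {
      case Union u -> u.operands().stream().anyMatch(o -> is(o, other));
      case Intersection i -> i.operands().stream().allMatch(o -> is(o, other));
      case Atom a -> a.name().equals(other.name()) || a.ancestors().contains(other.name());
    };
  }

  public static void main(String[] args) {
    Atom water = new Atom("Water", Set.of("Substance"));
    Atom soil = new Atom("Soil", Set.of("Substance"));
    Atom substance = new Atom("Substance", Set.of());
    System.out.println(is(new Union(List.of(water, soil)), substance)); // true
    System.out.println(is(new Intersection(List.of(water, new Atom("Dry", Set.of()))), substance)); // false
  }
}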
observableDeclaration.getPatternVariables()) { + var value = patternVariables.get(key); + if (value == null) { + return null; + } + String valueCode = + switch (value) { + case KimConcept kimConcept -> "(" + kimConcept.getUrn() + ")"; + case KimObservable kimConcept -> "(" + kimConcept.getUrn() + ")"; + case Concept kimConcept -> "(" + kimConcept.getUrn() + ")"; + case Observable kimConcept -> "(" + kimConcept.getUrn() + ")"; + case String string -> "\"" + Utils.Escape.forDoubleQuotedString(string, false) + "\""; + default -> value.toString(); + }; + urn = urn.replace("$:" + key, valueCode); + } + return resolveConcept(urn); + } - if (!concept.getEmergenceTriggers().isEmpty()) { - List triggers = new ArrayList<>(); - for (KimConcept trigger : concept.getEmergenceTriggers()) { - triggers.add(declare(trigger, ontology, monitor)); - } - registerEmergent(main, triggers); - } + @Override + public boolean compatible(Semantics o1, Semantics o2) { + return compatible(o1, o2, 0); + } - // if (kimObject != null) { - // kimObject.set(main); - // } + // @Override + public boolean compatible(Semantics o1, Semantics o2, int flags) { - return main; + if (o1 == o2 || o1.equals(o2)) { + return true; } + boolean mustBeSameCoreType = (flags & REQUIRE_SAME_CORE_TYPE) != 0; + boolean useRoleParentClosure = (flags & USE_ROLE_PARENT_CLOSURE) != 0; + // boolean acceptRealmDifferences = (flags & ACCEPT_REALM_DIFFERENCES) != 0; + + // TODO unsupported + boolean useTraitParentClosure = (flags & USE_TRAIT_PARENT_CLOSURE) != 0; + /** - * Arrange a set of concepts into the collection of the most specific members of each concept hierarchy - * therein. - *
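declareConcept() and declareObservable() with pattern variables, shown above, are string-level substitutions: each "$:variable" in the pattern URN is replaced by the URN of its binding (parenthesized in the declareConcept() case, double-quoted when the binding is a string), the rewritten URN is then resolved, and an unbound variable makes the whole declaration return null. A stand-alone sketch of just the substitution step; the sample pattern and namespace are invented:

import java.util.List;
import java.util.Map;

public final class PatternSubstitutionSketch {

  // Substitution only; resolving the rewritten URN is left out. Returns null when a variable
  // has no binding, as the methods above do.
  static String substitute(String patternUrn, List<String> variables, Map<String, String> bindings) {
    String urn = patternUrn;
    for (String key : variables) {
      String value = bindings.get(key);
      if (value == null) {
        return null;
      }
      urn = urn.replace("$:" + key, "(" + value + ")");
    }
    return urn;
  }

  public static void main(String[] args) {
    System.out.println(
        substitute(
            "made.up:ProportionOf $:quality",
            List.of("quality"),
            Map.of("quality", "made.up:Clay")));
    // made.up:ProportionOf (made.up:Clay)
  }
}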

- * TODO/FIXME not exposed, as I'm not sure this one is useful or intuitive - * enough. - * - * @param cc - * @return least general + * The check of fundamental types is only performed when both concepts are inside the worldview. */ - public Collection leastGeneral(Collection cc) { - - Set ret = new HashSet<>(); - for (Concept c : cc) { - List ccs = new ArrayList<>(ret); - boolean set = false; - for (Concept kn : ccs) { - if (is(c, kn)) { - ret.remove(kn); - ret.add(c); - set = true; - } else if (is(kn, c)) { - set = true; - } - } - if (!set) { - ret.add(c); - } - } - return ret; + if (inWorldview(o1, o2)) { + if ((!o1.is(SemanticType.OBSERVABLE) || !o2.is(SemanticType.OBSERVABLE)) + && !(o1.is(SemanticType.CONFIGURATION) && o2.is(SemanticType.CONFIGURATION))) { + return false; + } } /** - * Return the most specific ancestor that the concepts in the passed collection have in common, or null if - * none. - * - * @param cc - * @return + * first compatibility check is a simple subsumption if o1 is abstract, or a full core + * observability check if not. */ - @Override - public Concept leastGeneralCommon(Collection cc) { + if (o2.isAbstract()) { - Concept ret = null; - Iterator ii = cc.iterator(); - - if (ii.hasNext()) { + if (is(o2, o1)) { + return false; + } - ret = ii.next(); + } else { - if (ret != null) while (ii.hasNext()) { - ret = this.owl.getLeastGeneralCommonConcept(ret, ii.next()); - if (ret == null) break; - } - } + Concept core1 = coreObservable(o1); + Concept core2 = coreObservable(o2); - return ret; + if (core1 == null + || core2 == null + || !(mustBeSameCoreType ? core1.equals(core2) : is(core1, core2))) { + return false; + } } - /* - * Register the triggers and each triggering concept in the emergence map. - */ - public boolean registerEmergent(Concept configuration, Collection triggers) { + Concept ic1 = inherent(o1); + Concept ic2 = inherent(o2); - if (!configuration.isAbstract()) { + // same with inherency + if (ic1 == null && ic2 != null) { + return false; + } + if (ic1 != null && ic2 != null) { + if (!compatible(ic1, ic2)) { + return false; + } + } - // DebugFile.println("CHECK for storage of " + configuration + " based on " + - // triggers); + for (Concept t : traits(o2)) { + boolean ok = hasTrait(o1, t); + if (!ok && useTraitParentClosure) { + ok = hasDirectTrait(o1, t); + } + if (!ok) { + return false; + } + } - if (this.emergent.containsKey(configuration)) { - return true; - } + for (Concept t : roles(o2)) { + boolean ok = hasRole(o1, t); + if (!ok && useRoleParentClosure) { + ok = hasParentRole(o1, t); + } + if (!ok) { + return false; + } + } + + return true; + } + + /** + * True if the concept comes from a loaded worldview. The alternative is that it comes from a core + * imported ontology, and possibly (in the future) from a conceptual extent ontology. + * + * @param semantics + * @return + */ + private boolean inWorldview(Semantics... 
semantics) { + for (Object o : semantics) { + if (switch (o) { + case ConceptImpl concept -> concept.getType().isEmpty(); + case KimConceptImpl concept -> concept.getType().isEmpty(); + case ObservableImpl observable -> observable.getSemantics().getType().isEmpty(); + case KimObservableImpl observable -> observable.getSemantics().getType().isEmpty(); + default -> false; + }) { + return false; + } + } + return true; + } + + @Override + public boolean hasParentRole(Semantics o1, Concept t) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean contextuallyCompatible(Semantics focus, Semantics context1, Semantics context2) { + boolean ret = compatible(context1, context2, 0); + if (!ret && occurrent(context1)) { + ret = affectedBy(focus, context1); + Concept itsContext = inherent(context1); + if (!ret) { + if (itsContext != null) { + ret = compatible(itsContext, context2); + } + } + } + return ret; + } + + @Override + public boolean occurrent(Semantics context1) { + // TODO Auto-generated method stub + return false; + } + + @Override + public Collection affectedOrCreated(Semantics semantics) { + Set ret = new HashSet<>(); + for (Concept c : + this.owl.getRestrictedClasses( + semantics.asConcept(), this.owl.getProperty(NS.AFFECTS_PROPERTY))) { + if (!this.owl.getOntology(c.getNamespace()).isInternal()) { + ret.add(c); + } + } + for (Concept c : + this.owl.getRestrictedClasses( + semantics.asConcept(), this.owl.getProperty(NS.CREATES_PROPERTY))) { + if (!this.owl.getOntology(c.getNamespace()).isInternal()) { + ret.add(c); + } + } + return ret; + } + + @Override + public Collection affected(Semantics semantics) { + Set ret = new HashSet<>(); + for (Concept c : + this.owl.getRestrictedClasses( + semantics.asConcept(), this.owl.getProperty(NS.AFFECTS_PROPERTY))) { + if (!this.owl.getOntology(c.getNamespace()).isInternal()) { + ret.add(c); + } + } + return ret; + } + + @Override + public Collection created(Semantics semantics) { + Set ret = new HashSet<>(); + for (Concept c : + this.owl.getRestrictedClasses( + semantics.asConcept(), this.owl.getProperty(NS.CREATES_PROPERTY))) { + if (!this.owl.getOntology(c.getNamespace()).isInternal()) { + ret.add(c); + } + } + return ret; + } + + @Override + public boolean match(Semantics candidate, Semantics pattern) { + return syntacticMatcher.match(candidate, pattern); + } + + @Override + public boolean match(Semantics candidate, Semantics pattern, Map matches) { + return false; + } + + @Override + public T concretize(T pattern, Map concreteConcepts) { + return null; + } + + @Override + public T concretize(T pattern, List concreteConcepts) { + return null; + } + + @Override + public boolean affectedBy(Semantics affected, Semantics affecting) { + Concept described = describedType(affected); + for (Concept c : affected(affecting)) { + if (is(affected, c) || (described != null && is(described, c))) { + return true; + } + } + return false; + } - // DebugFile.println(" STORED " + configuration); - - Emergence descriptor = new Emergence(); - descriptor.emergentObservable = configuration; - descriptor.triggerObservables.addAll(triggers); - descriptor.namespaceId = configuration.getNamespace(); - this.emergent.put(configuration, descriptor); - - for (Concept trigger : triggers) { - for (Concept tr : this.owl.flattenOperands(trigger)) { - Set es = emergence.get(tr); - if (es == null) { - es = new HashSet<>(); - emergence.put(tr, es); - } - es.add(descriptor); - } - } + @Override + public boolean createdBy(Semantics affected, Semantics 
affecting) { + Concept described = describedType(affected); + if (described != null && is(described, affecting)) { + return true; + } + for (Concept c : created(affecting)) { + if (is(affected, c) || (described != null && is(described, c))) { + return true; + } + } + return false; + } - return true; - } + @Override + public Concept baseObservable(Semantics c) { - return false; + if (c instanceof Concept concept) { + return concept; } - private void createProperties(Concept ret, Ontology ns) { - - String pName = null; - String pProp = null; - if (ret.is(SemanticType.ATTRIBUTE)) { - // hasX - pName = "has" + ret.getName(); - pProp = NS.HAS_ATTRIBUTE_PROPERTY; - } else if (ret.is(SemanticType.REALM)) { - // inX - pName = "in" + ret.getName(); - pProp = NS.HAS_REALM_PROPERTY; - } else if (ret.is(SemanticType.IDENTITY)) { - // isX - pName = "is" + ret.getName(); - pProp = NS.HAS_IDENTITY_PROPERTY; - } - if (pName != null) { - ns.add(Axiom.ObjectPropertyAssertion(pName)); - ns.add(Axiom.ObjectPropertyRange(pName, ret.getName())); - ns.add(Axiom.SubObjectProperty(pProp, pName)); - ns.add(Axiom.AnnotationAssertion(ret.getName(), NS.TRAIT_RESTRICTING_PROPERTY, ns.getName() + - ":" + pName)); - } + Collection traits = directTraits(c); + Collection roles = directRoles(c); + if (traits.size() == 0 && roles.size() == 0 && derived(c)) { + return c.asConcept(); } - private Concept declare(KimConcept concept, Ontology ontology, Scope monitor) { - return declareInternal(concept, ontology, monitor); - } + return baseObservable(parent(c)); + } - private Concept declareInternal(KimConcept concept, Ontology ontology, Scope monitor) { + @Override + public Concept parent(Semantics c) { + Collection parents = this.owl.getParents(c.asConcept()); + return parents.isEmpty() ? null : parents.iterator().next(); + } - Concept existing = concepts.get(concept.getUrn()); - if (existing != null) { - return existing; - } + @Override + public Concept compose(Collection concepts, LogicalConnector connector) { + + if (connector == LogicalConnector.EXCLUSION || connector == LogicalConnector.DISJOINT_UNION) { + throw new KlabIllegalArgumentException( + "Reasoner::compose: connector " + connector + " not " + "supported"); + } + if (concepts.size() == 1) { + return concepts.iterator().next(); + } + if (concepts.size() > 1) { + return connector == LogicalConnector.UNION + ? 
this.owl.getUnion( + concepts, + this.owl.getOntology(concepts.iterator().next().getNamespace()), + concepts.iterator().next().getType()) + : this.owl.getIntersection( + concepts, + this.owl.getOntology(concepts.iterator().next().getNamespace()), + concepts.iterator().next().getType()); + } + return owl.getNothing(); + } - Concept main = null; + @Override + public Concept rawObservable(Semantics observable) { + String def = observable.getMetadata().get(NS.CORE_OBSERVABLE_PROPERTY, String.class); + Concept ret = observable.asConcept(); + if (def != null) { + ret = resolveConcept(def); + } + return ret; + } - if (concept.getObservable() != null) { - main = declareInternal(concept.getObservable(), ontology, monitor); - } else if (concept.getName() != null) { - main = this.owl.getConcept(concept.getName()); - } + @Override + public Builder observableBuilder(Observable observableImpl) { + return ObservableBuilder.getBuilder(observableImpl, scope, this); + } - if (main == null) { - return null; - } + /* + * --- non-API + */ - ObservableBuilder builder = - new ObservableBuilder(main, ontology, monitor, this).withDeclaration(concept); + /* + * Record correspondence of core concept peers to worldview concepts. Called by + * KimValidator for later use at namespace construction. + */ + public void setWorldviewPeer(String coreConcept, String worldviewConcept) { + coreConceptPeers.put(worldviewConcept, coreConcept); + } - builder.collective(concept.isCollective()); + public Concept build( + KimConceptStatement concept, + Ontology ontology, + KimConceptStatement kimObject, + Scope monitor) { - if (concept.getSemanticModifier() != null) { - Concept other = null; - if (concept.getComparisonConcept() != null) { - other = declareInternal(concept.getComparisonConcept(), ontology, monitor); - } - builder.as(concept.getSemanticModifier(), other == null ? (Concept[]) null : - new Concept[]{other}); - } + try { - // if (concept.getDistributedInherent() != null) { - // builder.withDistributedInherency(true); - // } + if (concept.isAlias() || concept.getUpperConceptDefined() != null) { /* - * transformations first + * can only have 'is' or 'equals' X; for core concepts 'is' means 'equals', and we use the + * statement to establish the semantic type. 
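+         * The alias is registered below as a delegate of the resolved parent and buildInternal is
+         * skipped, so no separate class definition is produced here.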
*/ - - if (concept.getInherent() != null) { - Concept c = declareInternal(concept.getInherent(), ontology, monitor); - if (c != null) { - builder.of(c); - } - } - // if (concept.getContext() != null) { - // Concept c = declareInternal(concept.getContext(), ontology, monitor); - // if (c != null) { - // if (SemanticRole.CONTEXT.equals(concept.getDistributedInherent())) { - // builder.of(c); - // } else { - // builder.within(c); - // } - // } - // } - if (concept.getCompresent() != null) { - Concept c = declareInternal(concept.getCompresent(), ontology, monitor); - if (c != null) { - builder.with(c); - } - } - if (concept.getCausant() != null) { - Concept c = declareInternal(concept.getCausant(), ontology, monitor); - if (c != null) { - builder.from(c); - } + Concept parent = null; + if (concept.getUpperConceptDefined() != null) { + parent = this.owl.getConcept(concept.getUpperConceptDefined()); + if (parent == null) { + monitor.error( + "Core concept " + concept.getUpperConceptDefined() + " is unknown", concept); + } else { + parent.getType().addAll(concept.getType()); + } + } else if (concept.getDeclaredParent() != null) { + parent = declareConcept(concept.getDeclaredParent()); } - if (concept.getCaused() != null) { - Concept c = declareInternal(concept.getCaused(), ontology, monitor); - if (c != null) { - builder.to(c); - } - } - if (concept.getGoal() != null) { - Concept c = declareInternal(concept.getGoal(), ontology, monitor); - if (c != null) { - // if (SemanticRole.GOAL.equals(concept.getDistributedInherent())) { - // builder.of(c); - // } else { - builder.withGoal(c); - // } - } - } - if (concept.getCooccurrent() != null) { - Concept c = declareInternal(concept.getCooccurrent(), ontology, monitor); - if (c != null) { - builder.withCooccurrent(c); - } + + if (parent != null) { + ontology.addDelegateConcept(concept.getUrn(), ontology.getName(), parent); } - if (concept.getAdjacent() != null) { - Concept c = declareInternal(concept.getAdjacent(), ontology, monitor); - if (c != null) { - builder.withAdjacent(c); + + return null; + } + + Concept ret = buildInternal(concept, ontology, kimObject, monitor); + + if (ret != null) { + + Concept upperConceptDefined = null; + if (concept.getDeclaredParent() == null) { + Concept parent = null; + if (concept.getUpperConceptDefined() != null) { + upperConceptDefined = parent = this.owl.getConcept(concept.getUpperConceptDefined()); + if (parent == null) { + monitor.error( + "Core concept " + concept.getUpperConceptDefined() + " is " + "unknown", concept); } - } - if (concept.getRelationshipSource() != null) { - Concept source = declareInternal(concept.getRelationshipSource(), ontology, monitor); - Concept target = declareInternal(concept.getRelationshipTarget(), ontology, monitor); - if (source != null && target != null) { - builder.linking(source, target); + } else { + parent = this.owl.getCoreOntology().getCoreType(concept.getType()); + if (coreConceptPeers.containsKey(ret.toString())) { + // ensure that any non-trivial core inheritance is dealt with + // appropriately + parent = this.owl.getCoreOntology().alignCoreInheritance(ret); } + } + if (parent != null) { + ontology.add( + Axiom.SubClass(parent.getNamespace() + ":" + parent.getName(), ret.getName())); + } } - for (KimConcept c : concept.getTraits()) { - Concept trait = declareInternal(c, ontology, monitor); - if (trait != null) { - builder.withTrait(trait); - } - } + createProperties(ret, ontology); + ontology.define(); - for (KimConcept c : concept.getRoles()) { - Concept role = 
declareInternal(c, ontology, monitor); - if (role != null) { - builder.withRole(role); - } + if (coreConceptPeers.containsKey(ret.toString()) && upperConceptDefined != null + /* && "true".equals(upperConceptDefined.getMetadata().get(NS.IS_CORE_KIM_TYPE, + "false")*/ ) { + // TODO revise - use core ontology statements only + this.owl.getCoreOntology().setAsCoreType(ret); } + } - Concept ret = null; - try { - - ret = builder.buildConcept(); - - /* - * handle unions and intersections - */ - if (!concept.getOperands().isEmpty()) { - List concepts = new ArrayList<>(); - concepts.add(ret); - for (KimConcept op : concept.getOperands()) { - concepts.add(declareInternal(op, ontology, monitor)); - } - ret = concept.getExpressionType() == KimConcept.Expression.INTERSECTION ? - this.owl.getIntersection(concepts, ontology, concept.getOperands().get(0).getType()) - : - this.owl.getUnion(concepts, ontology, concept.getOperands().get(0).getType()); - } + return ret; - // set the k.IM definition in the concept.This must only happen if the - // concept wasn't there - within build() and repeat if mods are made - if (builder.axiomsAdded()) { + } catch (Throwable e) { + monitor.error(e, concept); + } + return null; + } - this.owl.getOntology(ret.getNamespace()).define(Collections.singletonList(Axiom.AnnotationAssertion(ret.getName(), NS.CONCEPT_DEFINITION_PROPERTY, concept.getUrn()))); + private Concept buildInternal( + final KimConceptStatement concept, + Ontology ontology, + KimConceptStatement kimObject, + final Scope monitor) { - // consistency check - if (!satisfiable(ret)) { - ret.getType().add(SemanticType.NOTHING); - monitor.error("the definition of this concept has logical errors and " + - "is inconsistent", - concept); - } + Concept main = null; + String mainId = concept.getUrn(); - /** - * Now that the URN is set, put away the description - */ - registerConcept(ret); - } + ontology.add( + Axiom.ClassAssertion( + mainId, + concept.getType().stream() + .map((c) -> SemanticType.valueOf(c.name())) + .collect(Collectors.toSet()))); - } catch (Throwable e) { - monitor.error(e, concept); - } + // set the k.IM definition + ontology.add( + Axiom.AnnotationAssertion( + mainId, NS.CONCEPT_DEFINITION_PROPERTY, ontology.getName() + ":" + concept.getUrn())); - if (concept.isNegated()) { - ret = negated(ret); - } + // and the reference name + ontology.add( + Axiom.AnnotationAssertion( + mainId, + NS.REFERENCE_NAME_PROPERTY, + OWL.getCleanFullId(ontology.getName(), concept.getUrn()))); - /** - * TODO/CHECK Save the declaration, including the source code which could have a - * different order - */ - if (ret != null) { - concepts.put(ret.getUrn(), ret); - } + if (concept.getType().contains(SemanticType.NOTHING)) { + monitor.error("Declaration is inconsistent or uses unknown concepts", concept); + return null; + } - return ret; + if (concept.getDocstring() != null) { + ontology.add( + Axiom.AnnotationAssertion(mainId, Vocabulary.RDFS_COMMENT, concept.getDocstring())); } - public Observable declare(KimObservable concept, Ontology declarationOntology, Scope monitor) { + if (kimObject == null) { + ontology.add(Axiom.AnnotationAssertion(mainId, NS.BASE_DECLARATION, "true")); + } - if (concept.getNonSemanticType() != null) { - Concept nsmain = this.owl.getNonsemanticPeer(concept.getModelReference(), - concept.getNonSemanticType()); - ObservableImpl observable = ObservableImpl.promote(nsmain, scope); - // observable.setModelReference(concept.getModelReference()); - observable.setName(concept.getFormalName()); - 
observable.setStatedName(concept.getFormalName()); - observable.setReferenceName(concept.getFormalName()); - return observable; - } + /* + * basic attributes subjective deniable internal uni/bidirectional + * (relationship) + */ + if (concept.isAbstract() || concept.getNamespace().equals(CoreOntology.CORE_ONTOLOGY_NAME)) { + ontology.add(Axiom.AnnotationAssertion(mainId, CoreOntology.NS.IS_ABSTRACT, "true")); + } - Concept main = declareInternal(concept.getSemantics(), declarationOntology, monitor); - if (main == null) { - return null; - } + ontology.define(); + main = ontology.getConcept(mainId); - Concept observable = main; + indexer.index(concept); - Observable.Builder builder = new ObservableBuilder(observable, monitor, this); + if (concept.getDeclaredParent() != null) { - // ret.setUrl(concept.getURI()); - // builder.withUrl(concept.getURI()); + // List concepts = new ArrayList<>(); + // for (KimConcept pdecl : parent.getConcepts()) { + Concept declared = declare(concept.getDeclaredParent(), ontology, monitor); + if (declared == null) { + monitor.error( + "parent declaration " + + concept.getDeclaredParent().getUrn() + + " does not " + + "identify " + + "known " + + "concepts", + concept.getDeclaredParent()); + return null; + } else { + ontology.add(Axiom.SubClass(declared.getNamespace() + ":" + declared.getName(), mainId)); + } + // concepts.add(declared); + // } + // + // if (concepts.size() == 1) { + // + // } + /* else { + Concept expr = null; + switch (parent.getConnector()) { + case INTERSECTION: + expr = this.owl.getIntersection(concepts, ontology, concepts.get(0).getType()); + break; + case UNION: + expr = this.owl.getUnion(concepts, ontology, concepts.get(0).getType()); + break; + case FOLLOWS: + expr = this.owl.getConsequentialityEvent(concepts, ontology); + break; + default: + // won't happen + break; + } + if (concept.isAlias()) { + ontology.addDelegateConcept(mainId, ontology.getName(), expr); + } else { + ontology.add(Axiom.SubClass(expr.getNamespace() + ":" + expr.getName(), mainId)); + } + }*/ + ontology.define(); + } + + for (var child : concept.getChildren()) { + try { + // KimConceptStatement chobj = kimObject == null ? null : new + // KimConceptStatement((IKimConceptStatement) child); + Concept childConcept = + buildInternal((KimConceptStatement) child, ontology, concept, /* + * monitor instanceof ErrorNotifyingMonitor ? ((ErrorNotifyingMonitor) + * monitor).contextualize(child) : + */ monitor); + if (childConcept != null) { + ontology.add(Axiom.SubClass(mainId, childConcept.getName())); + ontology.define(); + } + // kimObject.getChildren().add(chobj); + } catch (Throwable e) { + monitor.error(e, child); + } + } + + for (KimConcept inherited : concept.getTraitsInherited()) { + Concept trait = declare(inherited, ontology, monitor); + if (trait == null) { + monitor.error( + "inherited " + inherited.getName() + " does not identify " + "known concepts", + inherited); + // return null; + } else { + this.owl.addTrait(main, trait, ontology); + } + } + + // TODO all the rest: creates, .... 
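+    // Each of the clauses handled below ('affects', 'requires identity', 'creates') is encoded as
+    // an existential ('some') restriction on the corresponding property of the main concept via
+    // Owl#restrictSome.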
+ for (KimConcept affected : concept.getQualitiesAffected()) { + Concept quality = declare(affected, ontology, monitor); + if (quality == null) { + monitor.error( + "affected " + affected.getName() + " does not identify " + "known concepts", affected); + } else { + this.owl.restrictSome( + main, this.owl.getProperty(CoreOntology.NS.AFFECTS_PROPERTY), quality, ontology); + } + } + + for (KimConcept required : concept.getRequiredIdentities()) { + Concept quality = declare(required, ontology, monitor); + if (quality == null) { + monitor.error( + "required " + required.getName() + " does not identify " + "known concepts", required); + } else { + this.owl.restrictSome( + main, this.owl.getProperty(NS.REQUIRES_IDENTITY_PROPERTY), quality, ontology); + } + } + + for (KimConcept affected : concept.getObservablesCreated()) { + Concept quality = declare(affected, ontology, monitor); + if (quality == null) { + monitor.error( + "created " + affected.getName() + " does not identify known" + " concepts", affected); + } else { + this.owl.restrictSome(main, this.owl.getProperty(NS.CREATES_PROPERTY), quality, ontology); + } + } + + for (ApplicableConcept link : concept.getSubjectsLinked()) { + if (link.getOriginalObservable() == null && link.getSource() != null) { + // relationship source->target + this.owl.defineRelationship( + main, + declare(link.getSource(), ontology, monitor), + declare(link.getTarget(), ontology, monitor), + ontology); + } else { + // TODO + } + } + + if (!concept.getEmergenceTriggers().isEmpty()) { + List triggers = new ArrayList<>(); + for (KimConcept trigger : concept.getEmergenceTriggers()) { + triggers.add(declare(trigger, ontology, monitor)); + } + registerEmergent(main, triggers); + } + + // if (kimObject != null) { + // kimObject.set(main); + // } - boolean unitsSet = false; + return main; + } + + /** + * Arrange a set of concepts into the collection of the most specific members of each concept + * hierarchy therein. + * + *
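+   * For example, if Tree is a subclass of Plant, passing {Tree, Plant, Animal} yields {Tree,
+   * Animal}: Plant is dropped because a more specific member of its hierarchy is present.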

TODO/FIXME not exposed, as I'm not sure this one is useful or intuitive enough. + * + * @param cc + * @return least general + */ + public Collection leastGeneral(Collection cc) { + + Set ret = new HashSet<>(); + for (Concept c : cc) { + List ccs = new ArrayList<>(ret); + boolean set = false; + for (Concept kn : ccs) { + if (is(c, kn)) { + ret.remove(kn); + ret.add(c); + set = true; + } else if (is(kn, c)) { + set = true; + } + } + if (!set) { + ret.add(c); + } + } + return ret; + } + + /** + * Return the most specific ancestor that the concepts in the passed collection have in common, or + * null if none. + * + * @param cc + * @return + */ + @Override + public Concept leastGeneralCommon(Collection cc) { + + Concept ret = null; + Iterator ii = cc.iterator(); + + if (ii.hasNext()) { + + ret = ii.next(); + + if (ret != null) + while (ii.hasNext()) { + ret = this.owl.getLeastGeneralCommonConcept(ret, ii.next()); + if (ret == null) break; + } + } + + return ret; + } + + /* + * Register the triggers and each triggering concept in the emergence map. + */ + public boolean registerEmergent(Concept configuration, Collection triggers) { + + if (!configuration.isAbstract()) { + + // DebugFile.println("CHECK for storage of " + configuration + " based on " + + // triggers); + + if (this.emergent.containsKey(configuration)) { + return true; + } - if (concept.getUnit() != null) { - unitsSet = true; - builder = builder.withUnit(concept.getUnit()); - } + // DebugFile.println(" STORED " + configuration); - if (concept.getCurrency() != null) { - unitsSet = true; - builder = builder.withCurrency(concept.getCurrency()); - } + Emergence descriptor = new Emergence(); + descriptor.emergentObservable = configuration; + descriptor.triggerObservables.addAll(triggers); + descriptor.namespaceId = configuration.getNamespace(); + this.emergent.put(configuration, descriptor); - if (concept.getValue() != null) { - Object value = concept.getValue(); - if (value instanceof KimConcept) { - value = declareConcept((KimConcept) value); - } - builder = builder.withInlineValue(value); + for (Concept trigger : triggers) { + for (Concept tr : this.owl.flattenOperands(trigger)) { + Set es = emergence.get(tr); + if (es == null) { + es = new HashSet<>(); + emergence.put(tr, es); + } + es.add(descriptor); } + } - if (concept.getDefaultValue() != null) { - Object value = concept.getValue(); - if (value instanceof KimConcept) { - value = declareConcept((KimConcept) value); - } - builder = builder.withDefaultValue(value); - } + return true; + } - for (var exc : concept.getResolutionExceptions()) { - builder = builder.withResolutionException(exc); - } + return false; + } - if (concept.getRange() != null) { - builder = builder.withRange(concept.getRange()); - } + private void createProperties(Concept ret, Ontology ns) { - builder = builder.optional(concept.isOptional()).generic(concept.isGeneric())/* .global(concept - .isGlobal()) */.named(concept.getFormalName()); + String pName = null; + String pProp = null; + if (ret.is(SemanticType.ATTRIBUTE)) { + // hasX + pName = "has" + ret.getName(); + pProp = NS.HAS_ATTRIBUTE_PROPERTY; + } else if (ret.is(SemanticType.REALM)) { + // inX + pName = "in" + ret.getName(); + pProp = NS.HAS_REALM_PROPERTY; + } else if (ret.is(SemanticType.IDENTITY)) { + // isX + pName = "is" + ret.getName(); + pProp = NS.HAS_IDENTITY_PROPERTY; + } + if (pName != null) { + ns.add(Axiom.ObjectPropertyAssertion(pName)); + ns.add(Axiom.ObjectPropertyRange(pName, ret.getName())); + ns.add(Axiom.SubObjectProperty(pProp, 
pName)); + ns.add( + Axiom.AnnotationAssertion( + ret.getName(), NS.TRAIT_RESTRICTING_PROPERTY, ns.getName() + ":" + pName)); + } + } - // TODO gather generic concepts and abstract ones - // if (concept.isExclusive()) { - // builder = builder.withResolution(Observable.Resolution.Only); - // } else if (concept.isGlobal()) { - // builder = builder.withResolution(Observable.Resolution.All); - // } else if (concept.isGeneric()) { - // builder = builder.withResolution(Observable.Resolution.Any); - // } + private Concept declare(KimConcept concept, Ontology ontology, Scope monitor) { + return declareInternal(concept, ontology, monitor); + } - for (var operator : concept.getValueOperators()) { - builder = builder.withValueOperator(operator.getFirst(), operator.getSecond()); - } + private Concept declareInternal(KimConcept concept, Ontology ontology, Scope monitor) { - for (var annotation : concept.getAnnotations()) { - builder = builder.withAnnotation(new AnnotationImpl(annotation)); - } + Concept existing = concepts.get(concept.getUrn()); + if (existing != null) { + return existing; + } - // CHECK: fluidUnits = needsUnits() && !unitsSet; + Concept main = null; - return (Observable) builder.build(); + if (concept.getObservable() != null) { + main = declareInternal(concept.getObservable(), ontology, monitor); + } else if (concept.getName() != null) { + main = this.owl.getConcept(concept.getName()); } - public void registerConcept(Concept thing) { - this.concepts.put(thing.getUrn(), thing); + if (main == null) { + return null; } - @Override - public Collection rolesFor(Concept observable, Concept context) { - // TODO Auto-generated method stub - return null; + ObservableBuilder builder = + new ObservableBuilder(main, ontology, monitor, this).withDeclaration(concept); + + builder.collective(concept.isCollective()); + + if (concept.getSemanticModifier() != null) { + Concept other = null; + if (concept.getComparisonConcept() != null) { + other = declareInternal(concept.getComparisonConcept(), ontology, monitor); + } + builder.as( + concept.getSemanticModifier(), other == null ? 
(Concept[]) null : new Concept[] {other}); } - @Override - public Concept impliedRole(Concept baseRole, Concept contextObservable) { - // TODO Auto-generated method stub - return null; + // if (concept.getDistributedInherent() != null) { + // builder.withDistributedInherency(true); + // } + + /* + * transformations first + */ + + if (concept.getInherent() != null) { + Concept c = declareInternal(concept.getInherent(), ontology, monitor); + if (c != null) { + builder.of(c); + } + } + // if (concept.getContext() != null) { + // Concept c = declareInternal(concept.getContext(), ontology, monitor); + // if (c != null) { + // if (SemanticRole.CONTEXT.equals(concept.getDistributedInherent())) { + // builder.of(c); + // } else { + // builder.within(c); + // } + // } + // } + if (concept.getCompresent() != null) { + Concept c = declareInternal(concept.getCompresent(), ontology, monitor); + if (c != null) { + builder.with(c); + } + } + if (concept.getCausant() != null) { + Concept c = declareInternal(concept.getCausant(), ontology, monitor); + if (c != null) { + builder.from(c); + } + } + if (concept.getCaused() != null) { + Concept c = declareInternal(concept.getCaused(), ontology, monitor); + if (c != null) { + builder.to(c); + } + } + if (concept.getGoal() != null) { + Concept c = declareInternal(concept.getGoal(), ontology, monitor); + if (c != null) { + // if (SemanticRole.GOAL.equals(concept.getDistributedInherent())) { + // builder.of(c); + // } else { + builder.withGoal(c); + // } + } + } + if (concept.getCooccurrent() != null) { + Concept c = declareInternal(concept.getCooccurrent(), ontology, monitor); + if (c != null) { + builder.withCooccurrent(c); + } + } + if (concept.getAdjacent() != null) { + Concept c = declareInternal(concept.getAdjacent(), ontology, monitor); + if (c != null) { + builder.withAdjacent(c); + } + } + if (concept.getRelationshipSource() != null) { + Concept source = declareInternal(concept.getRelationshipSource(), ontology, monitor); + Concept target = declareInternal(concept.getRelationshipTarget(), ontology, monitor); + if (source != null && target != null) { + builder.linking(source, target); + } } - @Override - public Collection impliedRoles(Concept role, boolean includeRelationshipEndpoints) { - // TODO Auto-generated method stub - return null; + for (KimConcept c : concept.getTraits()) { + Concept trait = declareInternal(c, ontology, monitor); + if (trait != null) { + builder.withTrait(trait); + } } - /** - * Entry point of a semantic search. If the request has a new searchId, start a new SemanticExpression and - * keep it until timeout or completion. - * - * @param request - */ - @Override - public SemanticSearchResponse semanticSearch(SemanticSearchRequest request) { + for (KimConcept c : concept.getRoles()) { + Concept role = declareInternal(c, ontology, monitor); + if (role != null) { + builder.withRole(role); + } + } - var response = new SemanticSearchResponse(request.getSearchId(), request.getRequestId()); + Concept ret = null; + try { - if (request.isCancelSearch()) { - semanticExpressions.invalidate(request.getSearchId()); - } else { + ret = builder.buildConcept(); - switch (request.getSearchMode()) { - case UNDO: - - // client may be stupid, as mine is - var expression = semanticExpressions.getIfPresent(request.getSearchId()); - if (expression != null) { - boolean ok = true; - if (!expression.undo()) { - semanticExpressions.invalidate(request.getSearchId()); - ok = false; - } - - response.setSearchId(ok ? 
request.getSearchId() : null); - if (ok) { - response.getErrors().addAll(expression.getErrors()); - response.getCode().addAll(expression.getStyledCode()); - response.setCurrentType(expression.getObservableType()); - } - } else { - response.getErrors().add("Timeout during search"); - } - break; - - case OPEN_SCOPE: - - expression = semanticExpressions.getIfPresent(response.getSearchId()); - if (expression != null) { - expression.accept("("); - response.setSearchId(request.getSearchId()); - response.getErrors().addAll(expression.getErrors()); - response.getCode().addAll(expression.getStyledCode()); - response.setCurrentType(expression.getObservableType()); - } else { - response.getErrors().add("Timeout during search"); - } - - break; - - case CLOSE_SCOPE: - - expression = semanticExpressions.getIfPresent(response.getSearchId()); - if (expression != null) { - expression.accept(")"); - response.getErrors().addAll(expression.getErrors()); - response.getCode().addAll(expression.getStyledCode()); - response.setCurrentType(expression.getObservableType()); - } else { - response.getErrors().add("Timeout during search"); - } - break; - - case TOKEN: - - expression = semanticExpressions.getIfPresent(response.getSearchId()); - if (expression == null) { - expression = SemanticExpression.create(scope); - semanticExpressions.put(response.getSearchId(), expression); - } else { - response.getErrors().add("Timeout during search"); - } - - for (var match : indexer.query(request.getQueryString(), - expression.getCurrent().getScope(), request.getMaxResults())) { - response.getMatches().add(match); - } - - // save the matches in the expression so that we recognize a choice - expression.setData("matches", response); - - break; - } + /* + * handle unions and intersections + */ + if (!concept.getOperands().isEmpty()) { + List concepts = new ArrayList<>(); + concepts.add(ret); + for (KimConcept op : concept.getOperands()) { + concepts.add(declareInternal(op, ontology, monitor)); } + ret = + concept.getExpressionType() == KimConcept.Expression.INTERSECTION + ? 
this.owl.getIntersection( + concepts, ontology, concept.getOperands().get(0).getType()) + : this.owl.getUnion(concepts, ontology, concept.getOperands().get(0).getType()); + } - response.setElapsedTimeMs(System.currentTimeMillis() - response.getElapsedTimeMs()); - return response; + // set the k.IM definition in the concept.This must only happen if the + // concept wasn't there - within build() and repeat if mods are made + if (builder.axiomsAdded()) { - } + this.owl + .getOntology(ret.getNamespace()) + .define( + Collections.singletonList( + Axiom.AnnotationAssertion( + ret.getName(), NS.CONCEPT_DEFINITION_PROPERTY, concept.getUrn()))); - @Override - public boolean shutdown() { + // consistency check + if (!satisfiable(ret)) { + ret.getType().add(SemanticType.NOTHING); + monitor.error( + "the definition of this concept has logical errors and " + "is inconsistent", + concept); + } + + /** Now that the URN is set, put away the description */ + registerConcept(ret); + } - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceUnavailable, - capabilities(serviceScope())); - // TODO Auto-generated method stub - return super.shutdown(); + } catch (Throwable e) { + monitor.error(e, concept); } - @Override - public List computeObservationStrategies(Observation observation, - ContextScope scope) { - return observationReasoner.computeMatchingStrategies(observation, scope); + if (concept.isNegated()) { + ret = negated(ret); } - @Override - public Collection collectComponents(Concept concept, Collection types) { - Set ret = new HashSet<>(); - KimConcept peer = scope.getService(ResourcesService.class).resolveConcept(concept.getUrn()); - peer.visit(new Statement.Visitor() { - @Override - public void visitAnnotation(Annotation annotation) { - // TODO - } + /** + * TODO/CHECK Save the declaration, including the source code which could have a different order + */ + if (ret != null) { + concepts.put(ret.getUrn(), ret); + } - @Override - public void visitStatement(Statement statement) { - // TODO call the method below with the ref if we have it - } + return ret; + } - // @Override - public void visitReference(String conceptName, Set type, KimConcept validParent) { - Concept cn = resolveConcept(conceptName); - if (cn != null && Sets.intersection(type, - org.integratedmodelling.common.utils.Utils.Collections.asSet(types)).size() == types.size()) { - ret.add(cn); - } - } - }); - return ret; + public Observable declare(KimObservable concept, Ontology declarationOntology, Scope monitor) { + + if (concept.getNonSemanticType() != null) { + Concept nsmain = + this.owl.getNonsemanticPeer(concept.getModelReference(), concept.getNonSemanticType()); + ObservableImpl observable = ObservableImpl.promote(nsmain, scope); + // observable.setModelReference(concept.getModelReference()); + observable.setName(concept.getFormalName()); + observable.setStatedName(concept.getFormalName()); + observable.setReferenceName(concept.getFormalName()); + return observable; } - @Override - public Concept replaceComponent(Concept original, Map replacements) { + Concept main = declareInternal(concept.getSemantics(), declarationOntology, monitor); + if (main == null) { + return null; + } - /* - * TODO this is the original lexical replacement, which is risky and incomplete. - * This should use a specialized visitor to rebuild the concept piecewise from a - * modified KimConcept. 
- */ + Concept observable = main; - if (replacements.isEmpty()) { - return original; - } + Observable.Builder builder = new ObservableBuilder(observable, monitor, this); - String declaration = original.getUrn(); - for (Concept key : replacements.keySet()) { - String rep = replacements.get(key).toString(); - if (rep.contains(" ")) { - rep = "(" + rep + ")"; - } - declaration = declaration.replace(key.getUrn(), rep); - } + // ret.setUrl(concept.getURI()); + // builder.withUrl(concept.getURI()); - return declareConcept(scope.getService(ResourcesService.class).resolveConcept(declaration)); + boolean unitsSet = false; + + if (concept.getUnit() != null) { + unitsSet = true; + builder = builder.withUnit(concept.getUnit()); } + if (concept.getCurrency() != null) { + unitsSet = true; + builder = builder.withCurrency(concept.getCurrency()); + } - @Override - public Concept buildConcept(ObservableBuildStrategy builder) { - Observable.Builder ret = new ObservableBuilder(builder.getBaseObservable(), scope, this); - ret = defineBuilder(builder, ret); - return ret.buildConcept(); + if (concept.getValue() != null) { + Object value = concept.getValue(); + if (value instanceof KimConcept) { + value = declareConcept((KimConcept) value); + } + builder = builder.withInlineValue(value); } - @Override - public Observable buildObservable(ObservableBuildStrategy builder) { - Observable.Builder ret = new ObservableBuilder(builder.getBaseObservable(), scope, this); - ret = defineBuilder(builder, ret); - return ret.build(); - } - - private Observable.Builder defineBuilder(ObservableBuildStrategy builder, Observable.Builder ret) { - for (ObservableBuildStrategy.Operation op : builder.getOperations()) { - switch (op.getType()) { - case OF -> { - ret = ret.of(op.getConcepts().get(0)); - } - case WITH -> { - ret = ret.with(op.getConcepts().get(0)); - } - // case WITHIN -> { - // ret = ret.within(op.getConcepts().get(0)); - // } - case GOAL -> { - ret = ret.withGoal(op.getConcepts().get(0)); - } - case FROM -> { - ret = ret.from(op.getConcepts().get(0)); - } - case TO -> { - ret = ret.to(op.getConcepts().get(0)); - } - case WITH_ROLE -> { - ret = ret.withRole(op.getConcepts().get(0)); - } - case AS -> { - ret = ret.as(op.getOperator(), op.getConcepts().toArray(new Concept[0])); - } - case WITH_TRAITS -> { - ret = ret.withTrait(op.getConcepts().toArray(new Concept[0])); - } - case WITHOUT -> { - ret = ret.without(op.getConcepts().toArray(new Concept[0])); - } - case WITHOUT_ANY_TYPES -> { - ret = ret.withoutAny(op.getTypes().toArray(new SemanticType[0])); - } - case WITHOUT_ANY_CONCEPTS -> { - ret = ret.withoutAny(op.getConcepts().toArray(new Concept[0])); - } - case ADJACENT -> { - ret = ret.withAdjacent(op.getConcepts().get(0)); - } - case COOCCURRENT -> { - ret = ret.withCooccurrent(op.getConcepts().get(0)); - } - case WITH_UNIT -> { - ret = ret.withUnit(op.getUnit()); - } - case WITH_CURRENCY -> { - ret = ret.withCurrency(op.getCurrency()); - } - case WITH_RANGE -> { - ret = ret.withRange(op.getRange()); - } - case WITH_VALUE_OPERATOR -> { - ret = ret.withValueOperator(op.getValueOperation().getFirst(), - op.getValueOperation().getSecond()); - } - case LINKING -> { - ret = ret.linking(op.getConcepts().get(0), op.getConcepts().get(1)); - } - case NAMED -> { - ret = ret.named((String) op.getPod()); - } - // case WITH_DISTRIBUTED_INHERENCY -> { - // ret = ret.withDistributedInherency(op.getPod().get(Boolean.class)); - // } - case WITHOUT_VALUE_OPERATORS -> { - ret = ret.withoutValueOperators(); - } - case AS_OPTIONAL -> { 
- ret = ret.optional((Boolean) op.getPod()); - } - case WITHOUT_ROLES -> { - ret = ret.without(op.getRoles().toArray(new SemanticRole[0])); - } - case WITH_TEMPORAL_INHERENT -> { - ret = ret.withTemporalInherent(op.getConcepts().get(0)); - } - // case WITH_DEREIFIED_ATTRIBUTE -> { - // ret = ret.withDereifiedAttribute(op.getPod().get(String.class)); - // } - case REFERENCE_NAMED -> { - ret = ret.withReferenceName((String) op.getPod()); - } - case WITH_INLINE_VALUE -> { - ret = ret.withInlineValue(op.getPod()); - } - case COLLECTIVE -> { - ret = ret.collective((Boolean) op.getPod()); - } - case WITH_DEFAULT_VALUE -> { - ret = ret.withDefaultValue(op.getPod()); - } - case WITH_RESOLUTION_EXCEPTION -> { - ret = ret.withResolutionException(op.getResolutionException()); - } - case AS_GENERIC -> { - ret = ret.generic((Boolean) op.getPod()); - } - case WITH_ANNOTATION -> { - for (Annotation annotation : op.getAnnotations()) { - ret = ret.withAnnotation(annotation); - } - } - case AS_DESCRIPTION_TYPE -> { - ret = ret.as(op.getDescriptionType()); - } - default -> - throw new KlabUnimplementedException("ReasonerService::defineBuilder: unhandled " + "operation " + op.getType()); - } - } - return ret; + if (concept.getDefaultValue() != null) { + Object value = concept.getValue(); + if (value instanceof KimConcept) { + value = declareConcept((KimConcept) value); + } + builder = builder.withDefaultValue(value); } - @Override - public boolean exportNamespace(String namespace, File directory) { - return this.owl.exportOntology(namespace, directory); + for (var exc : concept.getResolutionExceptions()) { + builder = builder.withResolutionException(exc); } - /** - * Replicate a remote scope in the scope manager. This should be called by the runtime service after - * creating it so if the scope has no ID we issue an error, as we do not create independent scopes. - * - * @param sessionScope a client scope that should record the ID for future communication. If the ID is - * null, the call has failed. - * @return - */ - @Override - public String registerSession(SessionScope sessionScope) { + if (concept.getRange() != null) { + builder = builder.withRange(concept.getRange()); + } - if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { + builder = + builder + .optional(concept.isOptional()) + .generic(concept.isGeneric()) /* .global(concept + .isGlobal()) */ + .named(concept.getFormalName()); - if (sessionScope.getId() == null) { - throw new KlabIllegalArgumentException("resolver: session scope has no ID, cannot register " + - "a scope autonomously"); - } + // TODO gather generic concepts and abstract ones + // if (concept.isExclusive()) { + // builder = builder.withResolution(Observable.Resolution.Only); + // } else if (concept.isGlobal()) { + // builder = builder.withResolution(Observable.Resolution.All); + // } else if (concept.isGeneric()) { + // builder = builder.withResolution(Observable.Resolution.Any); + // } - getScopeManager().registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); - return serviceSessionScope.getId(); - } + for (var operator : concept.getValueOperators()) { + builder = builder.withValueOperator(operator.getFirst(), operator.getSecond()); + } - throw new KlabIllegalArgumentException("unexpected scope class"); + for (var annotation : concept.getAnnotations()) { + builder = builder.withAnnotation(new AnnotationImpl(annotation)); } - /** - * Replicate a remote scope in the scope manager. 
This should be called by the runtime service after - * creating it so if the scope has no ID we issue an error, as we do not create independent scopes. - * - * @param contextScope a client scope that should record the ID for future communication. If the ID is - * null, the call has failed. - * @return - */ - @Override - public String registerContext(ContextScope contextScope) { + // CHECK: fluidUnits = needsUnits() && !unitsSet; - if (contextScope instanceof ServiceContextScope serviceContextScope) { + return (Observable) builder.build(); + } + + public void registerConcept(Concept thing) { + this.concepts.put(thing.getUrn(), thing); + } + + @Override + public Collection rolesFor(Concept observable, Concept context) { + // TODO Auto-generated method stub + return null; + } - if (contextScope.getId() == null) { - throw new KlabIllegalArgumentException("resolver: context scope has no ID, cannot register " + - "a scope autonomously"); - } + @Override + public Concept impliedRole(Concept baseRole, Concept contextObservable) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Collection impliedRoles(Concept role, boolean includeRelationshipEndpoints) { + // TODO Auto-generated method stub + return null; + } - getScopeManager().registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); - return serviceContextScope.getId(); - } + /** + * Entry point of a semantic search. If the request has a new searchId, start a new + * SemanticExpression and keep it until timeout or completion. + * + * @param request + */ + @Override + public SemanticSearchResponse semanticSearch(SemanticSearchRequest request) { + + var response = new SemanticSearchResponse(request.getSearchId(), request.getRequestId()); + + if (request.isCancelSearch()) { + semanticExpressions.invalidate(request.getSearchId()); + } else { + + switch (request.getSearchMode()) { + case UNDO: + + // client may be stupid, as mine is + var expression = semanticExpressions.getIfPresent(request.getSearchId()); + if (expression != null) { + boolean ok = true; + if (!expression.undo()) { + semanticExpressions.invalidate(request.getSearchId()); + ok = false; + } + + response.setSearchId(ok ? 
request.getSearchId() : null); + if (ok) { + response.getErrors().addAll(expression.getErrors()); + response.getCode().addAll(expression.getStyledCode()); + response.setCurrentType(expression.getObservableType()); + } + } else { + response.getErrors().add("Timeout during search"); + } + break; + + case OPEN_SCOPE: + expression = semanticExpressions.getIfPresent(response.getSearchId()); + if (expression != null) { + expression.accept("("); + response.setSearchId(request.getSearchId()); + response.getErrors().addAll(expression.getErrors()); + response.getCode().addAll(expression.getStyledCode()); + response.setCurrentType(expression.getObservableType()); + } else { + response.getErrors().add("Timeout during search"); + } + + break; + + case CLOSE_SCOPE: + expression = semanticExpressions.getIfPresent(response.getSearchId()); + if (expression != null) { + expression.accept(")"); + response.getErrors().addAll(expression.getErrors()); + response.getCode().addAll(expression.getStyledCode()); + response.setCurrentType(expression.getObservableType()); + } else { + response.getErrors().add("Timeout during search"); + } + break; + + case TOKEN: + expression = semanticExpressions.getIfPresent(response.getSearchId()); + if (expression == null) { + expression = SemanticExpression.create(scope); + semanticExpressions.put(response.getSearchId(), expression); + } else { + response.getErrors().add("Timeout during search"); + } + + for (var match : + indexer.query( + request.getQueryString(), + expression.getCurrent().getScope(), + request.getMaxResults())) { + response.getMatches().add(match); + } + + // save the matches in the expression so that we recognize a choice + expression.setData("matches", response); + + break; + } + } + + response.setElapsedTimeMs(System.currentTimeMillis() - response.getElapsedTimeMs()); + return response; + } + + @Override + public boolean shutdown() { + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceUnavailable, + capabilities(serviceScope())); + // TODO Auto-generated method stub + return super.shutdown(); + } + + @Override + public List computeObservationStrategies( + Observation observation, ContextScope scope) { + return observationReasoner.computeMatchingStrategies(observation, scope); + } + + @Override + public Collection collectComponents(Concept concept, Collection types) { + Set ret = new HashSet<>(); + KimConcept peer = scope.getService(ResourcesService.class).resolveConcept(concept.getUrn()); + peer.visit( + new Statement.Visitor() { + @Override + public void visitAnnotation(Annotation annotation) { + // TODO + } + + @Override + public void visitStatement(Statement statement) { + // TODO call the method below with the ref if we have it + } + + // @Override + public void visitReference( + String conceptName, Set type, KimConcept validParent) { + Concept cn = resolveConcept(conceptName); + if (cn != null + && Sets.intersection( + type, + org.integratedmodelling.common.utils.Utils.Collections.asSet(types)) + .size() + == types.size()) { + ret.add(cn); + } + } + }); + return ret; + } + + @Override + public Concept replaceComponent(Concept original, Map replacements) { + + /* + * TODO this is the original lexical replacement, which is risky and incomplete. + * This should use a specialized visitor to rebuild the concept piecewise from a + * modified KimConcept. 
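+     * In particular, plain string substitution can corrupt the result when one concept's URN is a
+     * substring of another's, and the parenthesization check below only covers the simplest cases.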
+ */ - throw new KlabIllegalArgumentException("unexpected scope class"); + if (replacements.isEmpty()) { + return original; + } + String declaration = original.getUrn(); + for (Concept key : replacements.keySet()) { + String rep = replacements.get(key).toString(); + if (rep.contains(" ")) { + rep = "(" + rep + ")"; + } + declaration = declaration.replace(key.getUrn(), rep); } + + return declareConcept(scope.getService(ResourcesService.class).resolveConcept(declaration)); + } + + @Override + public Concept buildConcept(ObservableBuildStrategy builder) { + Observable.Builder ret = new ObservableBuilder(builder.getBaseObservable(), scope, this); + ret = defineBuilder(builder, ret); + return ret.buildConcept(); + } + + @Override + public Observable buildObservable(ObservableBuildStrategy builder) { + Observable.Builder ret = new ObservableBuilder(builder.getBaseObservable(), scope, this); + ret = defineBuilder(builder, ret); + return ret.build(); + } + + private Observable.Builder defineBuilder( + ObservableBuildStrategy builder, Observable.Builder ret) { + for (ObservableBuildStrategy.Operation op : builder.getOperations()) { + switch (op.getType()) { + case OF -> { + ret = ret.of(op.getConcepts().get(0)); + } + case WITH -> { + ret = ret.with(op.getConcepts().get(0)); + } + // case WITHIN -> { + // ret = ret.within(op.getConcepts().get(0)); + // } + case GOAL -> { + ret = ret.withGoal(op.getConcepts().get(0)); + } + case FROM -> { + ret = ret.from(op.getConcepts().get(0)); + } + case TO -> { + ret = ret.to(op.getConcepts().get(0)); + } + case WITH_ROLE -> { + ret = ret.withRole(op.getConcepts().get(0)); + } + case AS -> { + ret = ret.as(op.getOperator(), op.getConcepts().toArray(new Concept[0])); + } + case WITH_TRAITS -> { + ret = ret.withTrait(op.getConcepts().toArray(new Concept[0])); + } + case WITHOUT -> { + ret = ret.without(op.getConcepts().toArray(new Concept[0])); + } + case WITHOUT_ANY_TYPES -> { + ret = ret.withoutAny(op.getTypes().toArray(new SemanticType[0])); + } + case WITHOUT_ANY_CONCEPTS -> { + ret = ret.withoutAny(op.getConcepts().toArray(new Concept[0])); + } + case ADJACENT -> { + ret = ret.withAdjacent(op.getConcepts().get(0)); + } + case COOCCURRENT -> { + ret = ret.withCooccurrent(op.getConcepts().get(0)); + } + case WITH_UNIT -> { + ret = ret.withUnit(op.getUnit()); + } + case WITH_CURRENCY -> { + ret = ret.withCurrency(op.getCurrency()); + } + case WITH_RANGE -> { + ret = ret.withRange(op.getRange()); + } + case WITH_VALUE_OPERATOR -> { + ret = + ret.withValueOperator( + op.getValueOperation().getFirst(), op.getValueOperation().getSecond()); + } + case LINKING -> { + ret = ret.linking(op.getConcepts().get(0), op.getConcepts().get(1)); + } + case NAMED -> { + ret = ret.named((String) op.getPod()); + } + // case WITH_DISTRIBUTED_INHERENCY -> { + // ret = ret.withDistributedInherency(op.getPod().get(Boolean.class)); + // } + case WITHOUT_VALUE_OPERATORS -> { + ret = ret.withoutValueOperators(); + } + case AS_OPTIONAL -> { + ret = ret.optional((Boolean) op.getPod()); + } + case WITHOUT_ROLES -> { + ret = ret.without(op.getRoles().toArray(new SemanticRole[0])); + } + case WITH_TEMPORAL_INHERENT -> { + ret = ret.withTemporalInherent(op.getConcepts().get(0)); + } + // case WITH_DEREIFIED_ATTRIBUTE -> { + // ret = ret.withDereifiedAttribute(op.getPod().get(String.class)); + // } + case REFERENCE_NAMED -> { + ret = ret.withReferenceName((String) op.getPod()); + } + case WITH_INLINE_VALUE -> { + ret = ret.withInlineValue(op.getPod()); + } + case COLLECTIVE -> { + ret = 
ret.collective((Boolean) op.getPod()); + } + case WITH_DEFAULT_VALUE -> { + ret = ret.withDefaultValue(op.getPod()); + } + case WITH_RESOLUTION_EXCEPTION -> { + ret = ret.withResolutionException(op.getResolutionException()); + } + case AS_GENERIC -> { + ret = ret.generic((Boolean) op.getPod()); + } + case WITH_ANNOTATION -> { + for (Annotation annotation : op.getAnnotations()) { + ret = ret.withAnnotation(annotation); + } + } + case AS_DESCRIPTION_TYPE -> { + ret = ret.as(op.getDescriptionType()); + } + default -> + throw new KlabUnimplementedException( + "ReasonerService::defineBuilder: unhandled " + "operation " + op.getType()); + } + } + return ret; + } + + @Override + public boolean exportNamespace(String namespace, File directory) { + return this.owl.exportOntology(namespace, directory); + } + + /** + * Replicate a remote scope in the scope manager. This should be called by the runtime service + * after creating it so if the scope has no ID we issue an error, as we do not create independent + * scopes. + * + * @param sessionScope a client scope that should record the ID for future communication. If the + * ID is null, the call has failed. + * @return + */ + @Override + public String registerSession(SessionScope sessionScope) { + + if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { + + if (sessionScope.getId() == null) { + throw new KlabIllegalArgumentException( + "resolver: session scope has no ID, cannot register " + "a scope autonomously"); + } + + getScopeManager() + .registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); + return serviceSessionScope.getId(); + } + + throw new KlabIllegalArgumentException("unexpected scope class"); + } + + /** + * Replicate a remote scope in the scope manager. This should be called by the runtime service + * after creating it so if the scope has no ID we issue an error, as we do not create independent + * scopes. + * + * @param contextScope a client scope that should record the ID for future communication. If the + * ID is null, the call has failed. 
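+   *     A null ID causes a KlabIllegalArgumentException, since this service never creates scopes
+   *     autonomously.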
+ * @return + */ + @Override + public String registerContext(ContextScope contextScope) { + + if (contextScope instanceof ServiceContextScope serviceContextScope) { + + if (contextScope.getId() == null) { + throw new KlabIllegalArgumentException( + "resolver: context scope has no ID, cannot register " + "a scope autonomously"); + } + + getScopeManager() + .registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); + return serviceContextScope.getId(); + } + + throw new KlabIllegalArgumentException("unexpected scope class"); + } } diff --git a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SemanticMatcher.java b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SemanticMatcher.java new file mode 100644 index 000000000..68a177ade --- /dev/null +++ b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SemanticMatcher.java @@ -0,0 +1,388 @@ +package org.integratedmodelling.klab.services.reasoner; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import java.util.*; +import org.integratedmodelling.klab.api.collections.Pair; +import org.integratedmodelling.klab.api.collections.Triple; +import org.integratedmodelling.klab.api.knowledge.Concept; +import org.integratedmodelling.klab.api.knowledge.SemanticType; +import org.integratedmodelling.klab.api.knowledge.Semantics; +import org.integratedmodelling.klab.api.services.ResourcesService; + +/** + * Computes semantic distance between concepts, with configurable caching. Clients should also + * provide similar caching to minimize network traffic. + */ +public class SemanticMatcher { + + private ReasonerService reasonerService; + private ResourcesService resourcesService; + + /** + * Cache for non-contextual matching with inherency=true and no abstract predicates incarnation + */ + private final LoadingCache, Integer> binaryMatchCache = + CacheBuilder.newBuilder() + .concurrencyLevel(20) + .maximumSize(400) // TODO configure + .build( + new CacheLoader<>() { + @Override + public Integer load(Pair key) throws Exception { + return computeSemanticDistance(key.getFirst(), key.getSecond()); + } + }); + + /** + * Cache for contextual matching with inherency=true and no abstract predicates incarnation + */ + private final LoadingCache, Integer> ternaryMatchCache = + CacheBuilder.newBuilder() + .concurrencyLevel(20) + .maximumSize(400) // TODO configure + .build( + new CacheLoader<>() { + @Override + public Integer load(Triple key) throws Exception { + return computeSemanticDistance(key.getFirst(), key.getSecond()); + } + }); + + private Integer computeSemanticDistance(Semantics first, Semantics second) { + return semanticDistance(first, second); + } + + public SemanticMatcher(ReasonerService reasonerService, ResourcesService resourcesService) { + this.reasonerService = reasonerService; + this.resourcesService = resourcesService; + } + + // TODO use cache except in special cases + public int semanticDistance(Semantics target, Semantics other) { + return semanticDistance(target.asConcept(), other.asConcept(), null, true, null); + } + + // TODO use cache except in special cases + public int semanticDistance(Semantics target, Semantics other, Semantics context) { + return semanticDistance( + target.asConcept(), + other.asConcept(), + context == null ? 
null : context.asConcept(), + true, + null); + } + + /** + * The workhorse of semantic distance computation can also consider any predicates that were + * abstract in the lineage of the passed concept (i.e. the concept is the result of a query with + * the abstract predicates, which has been contextualized to incarnate them into the passed + * correspondence with concrete counterparts). In that case, and only in that case, the distance + * between a concrete candidate and one that contains its predicates in the abstract form can be + * positive, i.e. a concept with abstract predicates can resolve one with concrete subclasses as + * long as the lineage contains its resolution. + * + *
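The class comment above recommends that reasoner clients cache distance results on their side as well. A minimal client-side sketch using the same Guava LoadingCache approach; the URN-pair key and the remoteCall hook are illustrative assumptions, not the actual client API:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Map;
import java.util.function.BiFunction;

public class ClientDistanceCache {

  private final LoadingCache<Map.Entry<String, String>, Integer> cache;

  // remoteCall performs the actual round trip to the reasoner service
  public ClientDistanceCache(BiFunction<String, String, Integer> remoteCall) {
    this.cache =
        CacheBuilder.newBuilder()
            .maximumSize(400) // mirror the server-side default; make configurable as needed
            .build(
                new CacheLoader<Map.Entry<String, String>, Integer>() {
                  @Override
                  public Integer load(Map.Entry<String, String> key) {
                    return remoteCall.apply(key.getKey(), key.getValue());
                  }
                });
  }

  public int semanticDistance(String targetUrn, String otherUrn) {
    return cache.getUnchecked(new SimpleImmutableEntry<>(targetUrn, otherUrn));
  }
}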

<p>
Remains public to address special situations when we have abstract resolutions or special + * needs about inherency. + * + * @param to + * @param context + * @param compareInherency + * @param resolvedAbstractPredicates + * @return + */ + public int semanticDistance( + Concept from, + Concept to, + Concept context, + boolean compareInherency, + Map resolvedAbstractPredicates) { + + int distance = 0; + + // String resolving = this.getDefinition(); + // String resolved = concept.getDefinition(); + // System.out.println("Does " + resolving + " resolve " + resolved + "?"); + + int mainDistance = + coreDistance(from, to, context, compareInherency, resolvedAbstractPredicates); + distance += mainDistance * 50; + if (distance < 0) { + return distance; + } + + // should have all the same traits - additional traits are allowed only + // in contextual types + Set acceptedTraits = new HashSet<>(); + for (Concept t : reasonerService.traits(from)) { + if (t.isAbstract() + && resolvedAbstractPredicates != null + && resolvedAbstractPredicates.containsKey(t)) { + distance += assertedDistance(resolvedAbstractPredicates.get(t), t); + acceptedTraits.add(resolvedAbstractPredicates.get(t)); + } else { + boolean ok = reasonerService.hasTrait(to, t); + if (!ok) { + return -50; + } + } + } + + for (Concept t : reasonerService.traits(to)) { + if (!acceptedTraits.contains(t) && !reasonerService.hasTrait(from, t)) { + return -50; + } + } + + // same with roles. + Set acceptedRoles = new HashSet<>(); + for (Concept t : reasonerService.roles(from)) { + if (t.isAbstract() + && resolvedAbstractPredicates != null + && resolvedAbstractPredicates.containsKey(t)) { + distance += assertedDistance(resolvedAbstractPredicates.get(t), t); + acceptedRoles.add(resolvedAbstractPredicates.get(t)); + } else { + boolean ok = reasonerService.hasRole(to, t); + if (!ok) { + return -50; + } + } + } + + for (Concept t : reasonerService.roles(to)) { + if (!acceptedRoles.contains(t) && !reasonerService.hasRole(from, t)) { + return -50; + } + } + + int component; + + if (compareInherency) { + /* + * any EXPLICIT inherency must be the same in both. + */ + Concept ourExplicitInherent = reasonerService.directInherent(from); + Concept itsExplicitInherent = reasonerService.directInherent(to); + + if (ourExplicitInherent != null || itsExplicitInherent != null) { + if (ourExplicitInherent != null && itsExplicitInherent != null) { + component = distance(ourExplicitInherent, itsExplicitInherent, true); + + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + } else { + return -50; + } + } + + /* + * inherency must be same (theirs is ours) unless our inherent type is abstract + */ + Concept ourInherent = reasonerService.inherent(from); + Concept itsInherent = reasonerService.inherent(to); + + if (ourInherent != null || itsInherent != null) { + + if (ourInherent != null && ourInherent.isAbstract()) { + component = distance(ourInherent, itsInherent, false); + } else if (ourInherent == null && itsInherent != null && context != null) { + /* + * Situations like: does XXX resolve YYY of ZZZ when ZZZ is the context. + */ + component = distance(context, itsInherent, false); + } else { + component = distance(itsInherent, ourInherent, false); + } + + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? 
d : 10); + } + distance += component; + } + } + + component = distance(reasonerService.goal(from), reasonerService.goal(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.cooccurrent(from), reasonerService.cooccurrent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.causant(from), reasonerService.causant(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.caused(from), reasonerService.caused(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.adjacent(from), reasonerService.adjacent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.compresent(from), reasonerService.compresent(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + component = distance(reasonerService.relativeTo(from), reasonerService.relativeTo(to), false); + if (component < 0) { + double d = ((double) component / 10.0); + return -1 * (int) (d > 10 ? d : 10); + } + distance += component; + + return distance; + } + + /** + * Get the distance between the core described observables after factoring out all operators and + * ensuring they are the same. If not the same, the concepts are incompatible and the distance is + * negative. + * + * @param to + * @return + */ + public int coreDistance( + Concept from, + Concept to, + Concept context, + boolean compareInherency, + Map resolvedAbstractPredicates) { + + if (from == to || from.equals(to)) { + return 0; + } + + Pair> c1ops = reasonerService.splitOperators(from); + Pair> c2ops = reasonerService.splitOperators(to); + + if (!c1ops.getSecond().equals(c2ops.getSecond())) { + return -50; + } + + if (!c1ops.getSecond().isEmpty()) { + /* + * if operators were extracted, the distance must take into account traits and + * the like for the concepts they describe, so call the main method again, which + * will call this and perform the core check below. + */ + return semanticDistance( + c1ops.getFirst(), + c2ops.getFirst(), + context, + compareInherency, + resolvedAbstractPredicates); + } + + Concept core1 = reasonerService.coreObservable(c1ops.getFirst()); + Concept core2 = reasonerService.coreObservable(c2ops.getFirst()); + + /* + * FIXME this must check: have operator ? (operator == operator && coreObs == + * coreObs) : coreObs == coreObs; + */ + + if (core1 == null || core2 == null) { + return -100; + } + + if (!from.is(SemanticType.PREDICATE) && !core1.equals(core2)) { + /* + * in order to resolve an observation, the core observables must be equal; + * subsumption is not OK (lidar elevation does not resolve elevation as it + * creates different observations; same for different observation techniques - + * easy strategy to annotate techs that make measurements incompatible = use a + * subclass instead of a related trait). + * + * Predicates are unique in being able to resolve a more specific predicate. 
+ */ + return -50; + } + + /** + * Previously returning the distance, which does not work unless the core observables are the + * same (differentiated by predicates only) - which for example makes identities under 'type of' + * be compatible no matter the identity. + */ + return core1.equals(core2) + ? assertedDistance(from, to) + : (assertedDistance(from, to) == 0 ? 0 : -1); + } + + private int distance(Concept from, Concept to, boolean acceptAbsent) { + + int ret = 0; + if (from == null && to != null) { + ret = acceptAbsent ? 50 : -50; + } else if (from != null && to == null) { + ret = -50; + } else if (from != null && to != null) { + ret = reasonerService.is(to, from) ? assertedDistance(to, from) : -100; + if (ret >= 0) { + for (Concept t : reasonerService.traits(from)) { + boolean ok = reasonerService.hasTrait(to, t); + if (!ok) { + return -50; + } + } + for (Concept t : reasonerService.traits(to)) { + if (!reasonerService.hasTrait(from, t)) { + ret += 10; + } + } + } + } + + return ret > 100 ? 100 : ret; + } + + public int assertedDistance(Semantics from, Semantics to) { + + if (from == to || from.equals(to)) { + return 0; + } + int ret = 1; + while (true) { + Collection parents = reasonerService.parents(from); + if (parents.isEmpty()) { + break; + } + if (parents.contains(to)) { + return ret; + } + for (Concept parent : parents) { + int d = assertedDistance(from, parent); + if (d >= 0) { + return ret + d; + } + } + ret++; + } + return -1; + } +} diff --git a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SyntacticMatcher.java b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SyntacticMatcher.java index 72d37650d..d3f55d0d1 100644 --- a/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SyntacticMatcher.java +++ b/klab.services.reasoner/src/main/java/org/integratedmodelling/klab/services/reasoner/SyntacticMatcher.java @@ -18,9 +18,12 @@ /** * Match two concept using one as a syntactic pattern for the other. Used in the rule system to - * filter resolution strategies. + * filter resolution strategies. Compared with {@link SemanticMatcher} this one requires the + * linguistic "pattern" to be the same, so it may fail when semantic matching succeeds. Used to + * recognize and decompose expressions, for example when selecting resolution strategies. * - *

<p>
Keeps syntactic objects in a cache to minimize traffic to the resources service. + *

<p>
Keeps syntactic objects and results in a cache to minimize traffic to the resources service. + * Reasoner clients should also provide adequate caching of results for the same reason. */ public class SyntacticMatcher { diff --git a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/DataflowCompiler.java b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/DataflowCompiler.java index 03972c66d..38274def1 100644 --- a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/DataflowCompiler.java +++ b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/DataflowCompiler.java @@ -30,200 +30,210 @@ */ public class DataflowCompiler { - private final ResolutionGraph resolutionGraph; - private final ContextScope scope; - private final Observation observation; - private Set catalog = new HashSet<>(); - - /** - * TODO add the context dataflow. - * - * @param resolutionGraph - * @param scope - */ - public DataflowCompiler(Observation observation, ResolutionGraph resolutionGraph, ContextScope scope) { - this.resolutionGraph = resolutionGraph; - this.scope = scope; - this.observation = observation; + private final ResolutionGraph resolutionGraph; + private final ContextScope scope; + private final Observation observation; + private Set catalog = new HashSet<>(); + + /** + * TODO add the context dataflow. + * + * @param resolutionGraph + * @param scope + */ + public DataflowCompiler( + Observation observation, ResolutionGraph resolutionGraph, ContextScope scope) { + this.resolutionGraph = resolutionGraph; + this.scope = scope; + this.observation = observation; + } + + /** + * Main entry point. When we resolve an ObservationStrategy from the runtime we should use the + * correspondent worker below, after locating the context actuator. + * + * @return + */ + public Dataflow compile() { + + if (resolutionGraph.isEmpty()) { + return Dataflow.empty(Observation.class); } - /** - * Main entry point. When we resolve an ObservationStrategy from the runtime we should use the - * correspondent worker below, after locating the context actuator. - * - * @return - */ - public Dataflow compile() { - - if (resolutionGraph.isEmpty()) { - return Dataflow.empty(Observation.class); - } - - System.out.println(Utils.Graphs.dump(resolutionGraph.graph())); - - Map catalog = new HashMap<>(); - var ret = new DataflowImpl(); - ret.setTarget(observation); - ret.setResolvedCoverage(resolutionGraph.getResolvedCoverage()); - for (var node : resolutionGraph.rootNodes()) { - /* - These MUST be observations. We check for now but it shouldn't happen. - */ - if (!(node instanceof Observation)) { - throw new KlabIllegalStateException("Resolution root is not an observation"); - } - ret.getComputation().addAll(compileObservation(observation, Scale.create(observation.getGeometry()), null)); - } - - var out = new StringWriter(); - var dioCan = new PrintWriter(out); - new DataflowEncoder(ret, scope).encode(dioCan); - - System.out.println(out.toString()); - - return ret; + System.out.println(Utils.Graphs.dump(resolutionGraph.graph())); + + Map catalog = new HashMap<>(); + var ret = new DataflowImpl(); + ret.setTarget(observation); + ret.setResolvedCoverage(resolutionGraph.getResolvedCoverage()); + for (var node : resolutionGraph.rootNodes()) { + /* + These MUST be observations. We check for now but it shouldn't happen. 
+ */ + if (!(node instanceof Observation)) { + throw new KlabIllegalStateException("Resolution root is not an observation"); + } + ret.getComputation() + .addAll(compileObservation(observation, Scale.create(observation.getGeometry()), null)); } - /** - * The entry point is calling this with a null strategy for all root observation nodes. Otherwise locate - * and contextualize the entry point and call one of the others on the correspondent actuator. - * - * @param observation - * @param strategy - * @return - */ - - List compileObservation(Observation observation, Geometry coverage, ObservationStrategy strategy) { - - if (catalog.contains(observation)) { - var ret = new ActuatorImpl(); - ret.setObservable(observation.getObservable()); - ret.setId(observation.getId()); - ret.setActuatorType(strategy == null ? Actuator.Type.RESOLVE : Actuator.Type.OBSERVE); - ret.setCoverage(coverage.as(Geometry.class)); - ret.setActuatorType(Actuator.Type.REFERENCE); - return List.of(ret); - } - - catalog.add(observation); - - var ret = new ArrayList(); - for (var edge : resolutionGraph.graph().outgoingEdgesOf(observation)) { - - var child = resolutionGraph.graph().getEdgeTarget(edge); - var childCoverage = edge.coverage; - - if (child instanceof ObservationStrategy observationStrategy) { - var actuator = new ActuatorImpl(); - actuator.setObservable(observation.getObservable()); - actuator.setId(observation.getId()); - actuator.setActuatorType(Actuator.Type.OBSERVE); - actuator.setCoverage(childCoverage == null ? null : childCoverage.as(Geometry.class)); - actuator.setStrategyUrn(observationStrategy.getUrn()); - compileStrategy(actuator, observation, childCoverage, observationStrategy); - ret.add(actuator); - } - } - - return ret; + var out = new StringWriter(); + var dioCan = new PrintWriter(out); + new DataflowEncoder(ret, scope).encode(dioCan); + + System.out.println(out.toString()); + + return ret; + } + + /** + * The entry point is calling this with a null strategy for all root observation nodes. Otherwise + * locate and contextualize the entry point and call one of the others on the correspondent + * actuator. + * + * @param observation + * @param strategy + * @return + */ + List compileObservation( + Observation observation, Geometry coverage, ObservationStrategy strategy) { + + if (catalog.contains(observation)) { + var ret = new ActuatorImpl(); + ret.setObservable(observation.getObservable()); + ret.setId(observation.getId()); + ret.setActuatorType(strategy == null ? 
Actuator.Type.RESOLVE : Actuator.Type.OBSERVE); + ret.setCoverage(coverage.as(Geometry.class)); + ret.setActuatorType(Actuator.Type.REFERENCE); + return List.of(ret); } - /** - * The strategy produces model actuators within the observation's - * - * @param observationActuator - * @param observation - * @param scale - * @param observationStrategy - * @return - */ - void compileStrategy(Actuator observationActuator, Observation observation, Geometry scale, - ObservationStrategy observationStrategy) { - - for (var edge : resolutionGraph.graph().outgoingEdgesOf(observationStrategy)) { - - var child = resolutionGraph.graph().getEdgeTarget(edge); - var coverage = edge.coverage; - - if (child instanceof Model model) { - compileModel(observationActuator, observation, coverage, observationStrategy, model); - } - } - - // children: model (the plan for the resolution), Strategy (deferred) - // if model - // determine model coverage, intersect if needed - // pass to compileModel(actuator, obs, scale, strategy, model) - // if strategy - // compile into actuator for deferring at point of resolution (in computations) - // + catalog.add(observation); + + var ret = new ArrayList(); + for (var edge : resolutionGraph.graph().outgoingEdgesOf(observation)) { + + var child = resolutionGraph.graph().getEdgeTarget(edge); + var childCoverage = edge.coverage; + + if (child instanceof ObservationStrategy observationStrategy) { + var actuator = new ActuatorImpl(); + actuator.setObservable(observation.getObservable()); + actuator.setId(observation.getId()); + actuator.setActuatorType(Actuator.Type.OBSERVE); + actuator.setCoverage(childCoverage == null ? null : childCoverage.as(Geometry.class)); + actuator.setStrategyUrn(observationStrategy.getUrn()); + compileStrategy(actuator, observation, childCoverage, observationStrategy); + ret.add(actuator); + } + } + + return ret; + } + + /** + * The strategy produces model actuators within the observation's + * + * @param observationActuator + * @param observation + * @param scale + * @param observationStrategy + * @return + */ + void compileStrategy( + Actuator observationActuator, + Observation observation, + Geometry scale, + ObservationStrategy observationStrategy) { + + for (var edge : resolutionGraph.graph().outgoingEdgesOf(observationStrategy)) { + + var child = resolutionGraph.graph().getEdgeTarget(edge); + var coverage = edge.coverage; + + if (child instanceof Model model) { + compileModel(observationActuator, observation, coverage, observationStrategy, model); + } + } + + // children: model (the plan for the resolution), Strategy (deferred) + // if model + // determine model coverage, intersect if needed + // pass to compileModel(actuator, obs, scale, strategy, model) + // if strategy + // compile into actuator for deferring at point of resolution (in computations) + // + } + + /** + * Compile a model's actuators within the observation's under a strategy + * + * @param observationActuator + * @param observation + * @param scale + * @param observationStrategy + * @param model + */ + void compileModel( + Actuator observationActuator, + Observation observation, + Geometry scale, + ObservationStrategy observationStrategy, + Model model) { + + for (var edge : resolutionGraph.graph().outgoingEdgesOf(model)) { + + var child = resolutionGraph.graph().getEdgeTarget(edge); + var coverage = edge.coverage; + + if (child instanceof Observation dependentObservation) { + observationActuator + .getChildren() + .addAll(compileObservation(dependentObservation, coverage, 
observationStrategy)); + } } - /** - * Compile a model's actuators within the observation's under a strategy - * - * @param observationActuator - * @param observation - * @param scale - * @param observationStrategy - * @param model - */ - void compileModel(Actuator observationActuator, Observation observation, Geometry scale, - ObservationStrategy observationStrategy, Model model) { - - for (var edge : resolutionGraph.graph().outgoingEdgesOf(model)) { - - var child = resolutionGraph.graph().getEdgeTarget(edge); - var coverage = edge.coverage; - - if (child instanceof Observation dependentObservation) { - observationActuator.getChildren().addAll(compileObservation(dependentObservation, coverage, observationStrategy)); - } - } - - for (var contextualizer : model.getComputation()) { - observationActuator.getComputation().add(adaptContextualizer(contextualizer)); - } - - // compileModel(actuator, obs, scale, strategy, model) - // finds Observation: call compileObservation(obs, strategy), - // add all computations and any deferrals + for (var contextualizer : model.getComputation()) { + observationActuator.getComputation().add(adaptContextualizer(contextualizer)); } - /** - * Turn each contextualizer into a runtime-supported call and return the call. - * - * @param contextualizer - * @return - */ - private ServiceCall adaptContextualizer(Contextualizable contextualizer) { - - ServiceCall ret = null; - - if (contextualizer.getServiceCall() != null) { - ret = contextualizer.getServiceCall(); - } else if (contextualizer.getResourceUrn() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.URN_RESOLVER.getServiceCall()); - ret.getParameters().putUnnamed(new Urn(contextualizer.getResourceUrn())); - } else if (contextualizer.getAccordingTo() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); - } else if (contextualizer.getClassification() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); - } else if (contextualizer.getLookupTable() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); - } else if (contextualizer.getExpression() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.EXPRESSION_RESOLVER.getServiceCall()); - } else if (contextualizer.getObservationStrategy() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.DEFER_RESOLUTION.getServiceCall()); - } else if (contextualizer.getLiteral() != null) { - ret = new ServiceCallImpl(RuntimeService.CoreFunctor.CONSTANT_RESOLVER.getServiceCall()); - } - - // TODO add remaining info from the contextualizable in the call's metadata - - - // TODO more? - return ret; + // compileModel(actuator, obs, scale, strategy, model) + // finds Observation: call compileObservation(obs, strategy), + // add all computations and any deferrals + } + + /** + * Turn each contextualizer into a runtime-supported call and return the call. 
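adaptContextualizer below maps each kind of contextualizable to one of the runtime's core functors through an if/else chain. As a design alternative, the same dispatch can be expressed as an exhaustive switch over a sealed hierarchy, so a newly added kind fails to compile until it is handled; the types and functor names below are hypothetical, not the k.LAB ones:

// Hypothetical sealed model of the contextualizable kinds, for illustration only.
sealed interface Computable
    permits Computable.Call, Computable.Resource, Computable.Lookup, Computable.Expression,
        Computable.Constant {

  record Call(String serviceName) implements Computable {}

  record Resource(String urn) implements Computable {}

  record Lookup(String tableId) implements Computable {}

  record Expression(String code) implements Computable {}

  record Constant(Object value) implements Computable {}
}

final class FunctorDispatch {

  // Returns the (hypothetical) core functor that should handle the computable.
  static String coreFunctor(Computable computable) {
    return switch (computable) {
      case Computable.Call call -> call.serviceName(); // already a service call, pass through
      case Computable.Resource resource -> "urn.resolver";
      case Computable.Lookup lookup -> "lut.resolver";
      case Computable.Expression expression -> "expression.resolver";
      case Computable.Constant constant -> "constant.resolver";
    };
  }
}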
+ * + * @param contextualizer + * @return + */ + private ServiceCall adaptContextualizer(Contextualizable contextualizer) { + + ServiceCall ret = null; + + if (contextualizer.getServiceCall() != null) { + ret = contextualizer.getServiceCall(); + } else if (contextualizer.getResourceUrn() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.URN_RESOLVER.getServiceCall()); + ret.getParameters().putUnnamed(new Urn(contextualizer.getResourceUrn())); + } else if (contextualizer.getAccordingTo() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); + } else if (contextualizer.getClassification() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); + } else if (contextualizer.getLookupTable() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.LUT_RESOLVER.getServiceCall()); + } else if (contextualizer.getExpression() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.EXPRESSION_RESOLVER.getServiceCall()); + } else if (contextualizer.getObservationStrategy() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.DEFER_RESOLUTION.getServiceCall()); + } else if (contextualizer.getLiteral() != null) { + ret = new ServiceCallImpl(RuntimeService.CoreFunctor.CONSTANT_RESOLVER.getServiceCall()); } + // TODO add remaining info from the contextualizable in the call's metadata + + // TODO more? + return ret; + } } diff --git a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/PrioritizerImpl.java b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/PrioritizerImpl.java index d6eb9dc5b..0721aa244 100644 --- a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/PrioritizerImpl.java +++ b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/PrioritizerImpl.java @@ -12,40 +12,39 @@ public class PrioritizerImpl implements Prioritizer { - private ContextScope scope; - private Scale scale; - private Map ranks = new HashMap<>(); - - public PrioritizerImpl(ContextScope scope, Scale scale) { - this.scope = scope; - this.scale = scale; - // TODO establish the comparison strategy from the scope or the resolution namespace, - // defaulting at the default. + private ContextScope scope; + private Scale scale; + private Map ranks = new HashMap<>(); + + public PrioritizerImpl(ContextScope scope, Scale scale) { + this.scope = scope; + this.scale = scale; + // TODO establish the comparison strategy from the scope or the resolution namespace, + // defaulting at the default. 
+ } + + @Override + public int compare(Model o1, Model o2) { + // TODO Auto-generated method stub + return 0; + } + + @Override + public Metadata computeCriteria(Model model) { + if (this.ranks.containsKey(model)) { + return this.ranks.get(model); } - - @Override - public int compare(Model o1, Model o2) { - // TODO Auto-generated method stub - return 0; - } - - @Override - public Metadata computeCriteria(Model model) { - if (this.ranks.containsKey(model)) { - return this.ranks.get(model); - } - return null; - } - - @Override - public List listCriteria() { - // TODO Auto-generated method stub - return null; - } - - @Override - public Metadata getRanking(Model ranked) { - return this.ranks.get(ranked); - } - + return null; + } + + @Override + public List listCriteria() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Metadata getRanking(Model ranked) { + return this.ranks.get(ranked); + } } diff --git a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionCompiler.java b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionCompiler.java index 86a3e60db..50db82770 100644 --- a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionCompiler.java +++ b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionCompiler.java @@ -22,329 +22,343 @@ import java.util.ArrayList; import java.util.List; -/** - * Obviously a placeholder for the resolver 2.0 - */ +/** Obviously a placeholder for the resolver 2.0 */ public class ResolutionCompiler { - private final ResolverService resolver; - private double MINIMUM_WORTHWHILE_CONTRIBUTION = 0.15; + private final ResolverService resolver; + private double MINIMUM_WORTHWHILE_CONTRIBUTION = 0.15; + + public ResolutionCompiler(ResolverService service) { + this.resolver = service; + } + + /** + * Entry point for observations at root level. + * + * @param observation + * @param scope + * @return + */ + public ResolutionGraph resolve(Observation observation, ContextScope scope) { + return resolve(observation, scope, ResolverService.getResolutionGraph(scope)); + } + + private ResolutionGraph resolve( + Observation observation, ContextScope scope, ResolutionGraph parentGraph) { + + var resolutionGeometry = scope.getObservationGeometry(observation); + if (resolutionGeometry == null || resolutionGeometry.isEmpty()) { + return ResolutionGraph.empty(); + } - public ResolutionCompiler(ResolverService service) { - this.resolver = service; + var scale = Scale.create(resolutionGeometry, scope); + Coverage coverage = Coverage.create(scale, 0.0); + for (var resolvable : parentGraph.getResolving(observation.getObservable(), scale)) { + if (resolvable.getSecond().getGain() < MINIMUM_WORTHWHILE_CONTRIBUTION) { + continue; + } + parentGraph.accept(resolvable.getFirst(), resolvable.getSecond()); + coverage.merge(resolvable.getSecond(), LogicalConnector.UNION); + if (coverage.isComplete()) { + break; + } } - /** - * Entry point for observations at root level. 
- * - * @param observation - * @param scope - * @return - */ - public ResolutionGraph resolve(Observation observation, ContextScope scope) { - return resolve(observation, scope, ResolverService.getResolutionGraph(scope)); + if (coverage.isComplete()) { + return parentGraph; } - private ResolutionGraph resolve(Observation observation, ContextScope scope, - ResolutionGraph parentGraph) { + ResolutionGraph ret = parentGraph.createChild(observation, scale); + boolean complete = false; + + scope = + scope.withResolutionConstraints( + ResolutionConstraint.of( + ResolutionConstraint.Type.Provenance, Agent.create(AgentImpl.KLAB_AGENT_NAME))); + + List strategyGraphs = new ArrayList<>(); + for (ObservationStrategy strategy : + scope.getService(Reasoner.class).computeObservationStrategies(observation, scope)) { + + var strategyResolution = resolve(strategy, scale, ret, scope); + var cov = strategyResolution.checkCoverage(strategyResolution); + if (!cov.isRelevant()) { + continue; + } + strategyGraphs.add(strategyResolution); + if (cov.isComplete()) { + complete = true; + break; + } + } - var resolutionGeometry = scope.getObservationGeometry(observation); - if (resolutionGeometry == null || resolutionGeometry.isEmpty()) { - return ResolutionGraph.empty(); - } + if (complete) { + for (var strategyGraph : strategyGraphs) { + ret.merge(strategyGraph); + } + return ret; + } - var scale = Scale.create(resolutionGeometry, scope); - Coverage coverage = Coverage.create(scale, 0.0); - for (var resolvable : parentGraph.getResolving(observation.getObservable(), scale)) { - if (resolvable.getSecond().getGain() < MINIMUM_WORTHWHILE_CONTRIBUTION) { - continue; - } - parentGraph.accept(resolvable.getFirst(), resolvable.getSecond()); - coverage.merge(resolvable.getSecond(), LogicalConnector.UNION); - if (coverage.isComplete()) { - break; - } - } + return ResolutionGraph.empty(); + } - if (coverage.isComplete()) { - return parentGraph; - } + private ResolutionGraph resolve( + ObservationStrategy observationStrategy, + Scale scaleToCover, + ResolutionGraph graph, + ContextScope scope) { + + var ret = graph.createChild(observationStrategy, scaleToCover); - ResolutionGraph ret = parentGraph.createChild(observation, scale); - boolean complete = false; + for (var operation : observationStrategy.getOperations()) { - scope = scope.withResolutionConstraints(ResolutionConstraint.of(ResolutionConstraint.Type.Provenance, Agent.create(AgentImpl.KLAB_AGENT_NAME))); + switch (operation.getType()) { + case RESOLVE -> { + var contextualizedScope = + contextualizeScope(scope, operation.getObservable(), scaleToCover, graph); - List strategyGraphs = new ArrayList<>(); - for (ObservationStrategy strategy : - scope.getService(Reasoner.class).computeObservationStrategies(observation, scope)) { + if (contextualizedScope == null) { + return ResolutionGraph.empty(); + } + + var observableResolution = + resolve( + operation.getObservable(), + contextualizedScope.getSecond(), + ret, + contextualizedScope.getFirst()); + var cov = ret.checkCoverage(observableResolution); + if (!cov.isRelevant()) { + return ResolutionGraph.empty(); + } + ret.merge(observableResolution); + } + case OBSERVE -> { + boolean complete = false; + List modelGraphs = new ArrayList<>(); + var contextualizedScope = + contextualizeScope(scope, operation.getObservable(), scaleToCover, graph); - var strategyResolution = resolve(strategy, scale, ret, scope); - var cov = strategyResolution.checkCoverage(strategyResolution); + if (contextualizedScope == null) { + return 
ResolutionGraph.empty(); + } + + for (Model model : + queryModels( + operation.getObservable(), + contextualizedScope.getFirst(), + contextualizedScope.getSecond())) { + + var modelResolution = resolve(model, scaleToCover, ret, scope); + var cov = ret.checkCoverage(modelResolution); if (!cov.isRelevant()) { - continue; + continue; } - strategyGraphs.add(strategyResolution); + modelGraphs.add(modelResolution); if (cov.isComplete()) { - complete = true; - break; + complete = true; + break; } - } + } - if (complete) { - for (var strategyGraph : strategyGraphs) { - ret.merge(strategyGraph); + if (complete) { + for (var modelGraph : modelGraphs) { + ret.merge(modelGraph); } - return ret; + } else { + return ResolutionGraph.empty(); + } } + case APPLY -> { + + /** + * We ask the runtime to resolve all the contextualizables as a single operation. This + * will enable using anything that's supported natively in the runtime as well as using + * the resources service to locate and install any needed components or resources. + * + *

<p>
The strategy goes in the graph so there is no need for further storage of the + * contextualizers. + */ + var runtime = scope.getService(RuntimeService.class); + ResourceSet requirements = + runtime.resolveContextualizables(operation.getContextualizables(), scope); + + if (requirements.isEmpty()) { + return ResolutionGraph.empty(); + } - return ResolutionGraph.empty(); - } - - private ResolutionGraph resolve(ObservationStrategy observationStrategy, Scale scaleToCover, - ResolutionGraph graph, ContextScope scope) { - - var ret = graph.createChild(observationStrategy, scaleToCover); - - for (var operation : observationStrategy.getOperations()) { - - switch (operation.getType()) { - - case RESOLVE -> { - - var contextualizedScope = contextualizeScope(scope, operation.getObservable(), - scaleToCover - , graph); - - if (contextualizedScope == null) { - return ResolutionGraph.empty(); - } - - var observableResolution = resolve(operation.getObservable(), - contextualizedScope.getSecond(), ret, - contextualizedScope.getFirst()); - var cov = ret.checkCoverage(observableResolution); - if (!cov.isRelevant()) { - return ResolutionGraph.empty(); - } - ret.merge(observableResolution); - - } - case OBSERVE -> { - - boolean complete = false; - List modelGraphs = new ArrayList<>(); - var contextualizedScope = contextualizeScope(scope, operation.getObservable(), - scaleToCover - , graph); - - if (contextualizedScope == null) { - return ResolutionGraph.empty(); - } - - for (Model model : queryModels(operation.getObservable(), - contextualizedScope.getFirst(), contextualizedScope.getSecond())) { - - var modelResolution = resolve(model, scaleToCover, ret, scope); - var cov = ret.checkCoverage(modelResolution); - if (!cov.isRelevant()) { - continue; - } - modelGraphs.add(modelResolution); - if (cov.isComplete()) { - complete = true; - break; - } - } - - if (complete) { - for (var modelGraph : modelGraphs) { - ret.merge(modelGraph); - } - } else { - return ResolutionGraph.empty(); - } - - } - case APPLY -> { - - /** - * We ask the runtime to resolve all the contextualizables as a single - * operation. This will enable using anything that's supported natively - * in the runtime as well as using the resources service to locate and install - * any needed components or resources. - * - * The strategy goes in the graph so there is no need for further storage of the - * contextualizers. 
- */ - var runtime = scope.getService(RuntimeService.class); - ResourceSet requirements = - runtime.resolveContextualizables(operation.getContextualizables(), scope); - - if (requirements.isEmpty()) { - return ResolutionGraph.empty(); - } - - ret.setDependencies(Utils.Resources.merge(ret.getDependencies(), requirements)); - } - } - - if (!ret.isEmpty()) { - - // add any deferrals to the compiled strategy node and return it - for (var deferral : operation.getContextualStrategies()) { - - } - } + ret.setDependencies(Utils.Resources.merge(ret.getDependencies(), requirements)); } + } + if (!ret.isEmpty()) { - return ret; + // add any deferrals to the compiled strategy node and return it + for (var deferral : operation.getContextualStrategies()) {} + } } - private ResolutionGraph resolve(Model model, Scale scaleToCover, ResolutionGraph graph, - ContextScope scope) { + return ret; + } - var ret = graph.createChild(model, scaleToCover); + private ResolutionGraph resolve( + Model model, Scale scaleToCover, ResolutionGraph graph, ContextScope scope) { - scope = scope.withResolutionConstraints( - ResolutionConstraint.of( - ResolutionConstraint.Type.ResolutionNamespace, model.getNamespace()), - ResolutionConstraint.of(ResolutionConstraint.Type.ResolutionProject, model.getProjectName())); + var ret = graph.createChild(model, scaleToCover); - // check that all contextualizers are supported - var runtime = scope.getService(RuntimeService.class); - ResourceSet requirements = - runtime.resolveContextualizables(model.getComputation(), scope); + scope = + scope.withResolutionConstraints( + ResolutionConstraint.of( + ResolutionConstraint.Type.ResolutionNamespace, model.getNamespace()), + ResolutionConstraint.of( + ResolutionConstraint.Type.ResolutionProject, model.getProjectName())); - if (requirements.isEmpty()) { - return ResolutionGraph.empty(); - } - ret.setDependencies(Utils.Resources.merge(requirements, ret.getDependencies())); - - /* - resolve all dependencies - */ - boolean complete = model.getDependencies().isEmpty(); - List> modelGraphs = new ArrayList<>(); - for (var dependency : model.getDependencies()) { - - var dependencyResolution = resolve(dependency, scaleToCover, ret, scope); - var cov = ret.checkCoverage(dependencyResolution); - if (!cov.isRelevant()) { - if (dependency.isOptional()) { - continue; - } else { - return ResolutionGraph.empty(); - } - } - modelGraphs.add(Pair.of(dependencyResolution, dependency.getStatedName())); - } + // check that all contextualizers are supported + var runtime = scope.getService(RuntimeService.class); + ResourceSet requirements = runtime.resolveContextualizables(model.getComputation(), scope); - for (var modelGraph : modelGraphs) { - ret.merge(modelGraph.getFirst(), modelGraph.getSecond()); - } - - return ret; + if (requirements.isEmpty()) { + return ResolutionGraph.empty(); } + ret.setDependencies(Utils.Resources.merge(requirements, ret.getDependencies())); - private Pair contextualizeScope(ContextScope originalScope, Observable observable, - Scale originalScale, - ResolutionGraph resolutionSoFar) { - Scale scale = originalScale; - ContextScope scope = originalScope; - if (observable.isCollective()) { - /* - * Use the observer's scale if there is an observer - */ - if (scope.getObserver() != null) { - scale = Scale.create(scope.getObserver().getGeometry()); - } - } else if (!SemanticType.isSubstantial(observable.getSemantics().getType())) { - /* - * must have a context in the scope (and it must be compatible for the inherency) - */ - Observation context = 
resolutionSoFar.getContextObservation(); - if (context == null) { - scope.error("Cannot resolve a dependent without a context substantial observation: " + observable.getUrn()); - return null; - } - scope = scope.within(context); + /* + resolve all dependencies + */ + boolean complete = model.getDependencies().isEmpty(); + List> modelGraphs = new ArrayList<>(); + for (var dependency : model.getDependencies()) { + + var dependencyResolution = resolve(dependency, scaleToCover, ret, scope); + var cov = ret.checkCoverage(dependencyResolution); + if (!cov.isRelevant()) { + if (dependency.isOptional()) { + continue; + } else { + return ResolutionGraph.empty(); } - - return Pair.of(scope.withResolutionConstraints(ResolutionConstraint.of( - ResolutionConstraint.Type.Geometry, scale.as(Geometry.class))), scale); + } + modelGraphs.add(Pair.of(dependencyResolution, dependency.getStatedName())); } - private ResolutionGraph resolve(Observable observable, Scale scaleToCover, ResolutionGraph graph, - ContextScope scope) { - - var contextualizedScope = contextualizeScope(scope, observable, scaleToCover, graph); - - // create the observation in unresolved state - var observation = requireObservation(observable, contextualizedScope.getFirst(), - contextualizedScope.getSecond().as(Geometry.class)); - - if (observation == null) { - return ResolutionGraph.empty(); - } + for (var modelGraph : modelGraphs) { + ret.merge(modelGraph.getFirst(), modelGraph.getSecond()); + } - // resolve the observation in the scope - return resolve(observation, contextualizedScope.getFirst(), graph); + return ret; + } + + private Pair contextualizeScope( + ContextScope originalScope, + Observable observable, + Scale originalScale, + ResolutionGraph resolutionSoFar) { + Scale scale = originalScale; + ContextScope scope = originalScope; + if (observable.isCollective()) { + /* + * Use the observer's scale if there is an observer + */ + if (scope.getObserver() != null) { + scale = Scale.create(scope.getObserver().getGeometry()); + } + } else if (!SemanticType.isSubstantial(observable.getSemantics().getType())) { + /* + * must have a context in the scope (and it must be compatible for the inherency) + */ + Observation context = resolutionSoFar.getContextObservation(); + if (context == null) { + scope.error( + "Cannot resolve a dependent without a context substantial observation: " + + observable.getUrn()); + return null; + } + scope = scope.within(context); } - /** - * Query all the resource servers available in the scope to find the models that can observe the passed - * observable. The result should be ranked in decreasing order of fit to the context and the - * RESOLUTION_SCORE ranking should be in their metadata. 
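The ranking described here is not implemented yet in this changeset (PrioritizerImpl.compare still returns 0). A minimal sketch of what a metadata-driven comparator could look like, using a hypothetical score key and a generic metadata accessor rather than the real Metadata API:

import java.util.Comparator;
import java.util.Map;
import java.util.function.Function;

final class ResolutionRanking {

  // Hypothetical metadata key holding the resolution score (higher = better fit).
  static final String SCORE_KEY = "klab.resolution.score";

  // Orders candidates by decreasing score; candidates without a score sort last.
  static <T> Comparator<T> byScore(Function<T, Map<String, Object>> metadata) {
    return Comparator.<T>comparingDouble(
            candidate -> {
              Object score = metadata.apply(candidate).get(SCORE_KEY);
              return score instanceof Number number
                  ? number.doubleValue()
                  : Double.NEGATIVE_INFINITY;
            })
        .reversed();
  }
}

A prioritizer could then be as simple as models.sort(ResolutionRanking.byScore(m -> m.getMetadata())), assuming the metadata exposes a map view.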
- * - * @param observable - * @param scope - * @return - */ - public List queryModels(Observable observable, ContextScope scope, Scale scale) { + return Pair.of( + scope.withResolutionConstraints( + ResolutionConstraint.of(ResolutionConstraint.Type.Geometry, scale.as(Geometry.class))), + scale); + } - var prioritizer = new PrioritizerImpl(scope, scale); + private ResolutionGraph resolve( + Observable observable, Scale scaleToCover, ResolutionGraph graph, ContextScope scope) { - System.out.println("QUERYING MODELS FOR " + observable); + var contextualizedScope = contextualizeScope(scope, observable, scaleToCover, graph); - // FIXME use virtual threads & join() to obtain a synchronized list of ResourceSet, then - // use a merging strategy to get models one by one in their latest release + // create the observation in unresolved state + var observation = + requireObservation( + observable, + contextualizedScope.getFirst(), + contextualizedScope.getSecond().as(Geometry.class)); - var resources = scope.getService(ResourcesService.class); - ResourceSet models = resources.queryModels(observable, scope); - var ret = new ArrayList(resolver.ingestResources(models, scope, Model.class)); - ret.sort(prioritizer); - return ret; + if (observation == null) { + return ResolutionGraph.empty(); } - /** - * If the runtime contains the observation, return it (in resolved or unresolved status but with a valid - * ID). Otherwise create one in the geometry that the scope implies, with the unresolved ID, and return it - * for submission to the knowledge graph. - * - * @param observable - * @param scope - * @return a non-null observation - */ - private Observation requireObservation(Observable observable, ContextScope scope, Geometry geometry) { - var ret = scope.query(Observation.class, observable); - if (ret.isEmpty()) { - - var newObs = DigitalTwin.createObservation(scope, observable, geometry); - var id = scope.getService(RuntimeService.class).submit(newObs, scope); - if (id >= 0) { - ret = scope.query(Observation.class, observable); - } - } - - /* TODO this should also happen if the inherency is incompatible with the semantics for dependent - observables */ - if (ret.isEmpty()) { - scope.error("Cannot instantiate observation of " + observable.getUrn() + " in context " + scope.getId()); - return null; - } + // resolve the observation in the scope + return resolve(observation, contextualizedScope.getFirst(), graph); + } + + /** + * Query all the resource servers available in the scope to find the models that can observe the + * passed observable. The result should be ranked in decreasing order of fit to the context and + * the RESOLUTION_SCORE ranking should be in their metadata. + * + * @param observable + * @param scope + * @return + */ + public List queryModels(Observable observable, ContextScope scope, Scale scale) { + + var prioritizer = new PrioritizerImpl(scope, scale); + + System.out.println("QUERYING MODELS FOR " + observable); + + // FIXME use virtual threads & join() to obtain a synchronized list of ResourceSet, then + // use a merging strategy to get models one by one in their latest release + + var resources = scope.getService(ResourcesService.class); + ResourceSet models = resources.queryModels(observable, scope); + var ret = new ArrayList(resolver.ingestResources(models, scope, Model.class)); + ret.sort(prioritizer); + return ret; + } + + /** + * If the runtime contains the observation, return it (in resolved or unresolved status but with a + * valid ID). 
Otherwise create one in the geometry that the scope implies, with the unresolved ID, + * and return it for submission to the knowledge graph. + * + * @param observable + * @param scope + * @return a non-null observation + */ + private Observation requireObservation( + Observable observable, ContextScope scope, Geometry geometry) { + var ret = scope.query(Observation.class, observable); + if (ret.isEmpty()) { + + var newObs = DigitalTwin.createObservation(scope, observable, geometry); + var id = scope.getService(RuntimeService.class).submit(newObs, scope); + if (id >= 0) { + ret = scope.query(Observation.class, observable); + } + } - return ret.getFirst(); + /* TODO this should also happen if the inherency is incompatible with the semantics for dependent + observables */ + if (ret.isEmpty()) { + scope.error( + "Cannot instantiate observation of " + + observable.getUrn() + + " in context " + + scope.getId()); + return null; } + + return ret.getFirst(); + } } diff --git a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionGraph.java b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionGraph.java index 2fb4f56f0..599bf2a94 100644 --- a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionGraph.java +++ b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolutionGraph.java @@ -23,245 +23,250 @@ /** * Next-gen Resolution graph, to substitute Resolution/ResolutionImpl. - *

<p>
- * The nodes can be:
+ *
+ * <p>The nodes can be:
+ *
- * <ol>
- * <li>Observations</li>
- * <li>Models</li>
- * <li>Observables to defer resolution to</li>
- * </ol>
+ * <ol>
+ *   <li>Observations
+ *   <li>Models
+ *   <li>Observables to defer resolution to
+ * </ol>
+ *
- *

<p>
- * These are inserted in the graph before resolution. If they are unresolved (i.e. are models or are - * unresolved observations) they remain there as cached unresolved. The root nodes can only be observations of - * substantials. - *

<p>
- * The edges report the resolution coverage of the source resolvable. When the target is an observable, the - * coverage is unknown until there has been a trip back to the runtime. - *

<p>
- * The graph also contains a cache of resolved resolvables with their native coverage, indexed by - * observable, so that the resolver can quickly assess if a previously used resolvable can be used for other - * resolutions before searching for models. + * + *

<p>
These are inserted in the graph before resolution. If they are unresolved (i.e. are models or + * are unresolved observations) they remain there as cached unresolved. The root nodes can only be + * observations of substantials. + * + *

<p>
The edges report the resolution coverage of the source resolvable. When the target is an + * observable, the coverage is unknown until there has been a trip back to the runtime. + * + *
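Further down, merge() and checkCoverage() combine coverages with INTERSECTION when the target is a model and UNION otherwise ("models are resolved from full down, intersecting the coverage of the dependencies; everything else is resolved from zero up, uniting the coverages"). A deliberately simplified numeric sketch of that policy, treating coverage as a single fraction and approximating union as a capped sum, which ignores overlap between extents:

final class CoverageMath {

  enum Connector { UNION, INTERSECTION }

  // Intersection keeps the weakest link (a model is only as covered as its least-covered
  // dependency); union accumulates successive contributions up to full coverage.
  static double merge(double current, double incoming, Connector connector) {
    return switch (connector) {
      case INTERSECTION -> Math.min(current, incoming);
      case UNION -> Math.min(1.0, current + incoming);
    };
  }

  public static void main(String[] args) {
    double model = merge(1.0, 0.6, Connector.INTERSECTION);       // model starts full, narrows to 0.6
    double observation = merge(merge(0.0, 0.6, Connector.UNION),  // observation starts empty,
        0.5, Connector.UNION);                                    // fills up to 1.0 (capped)
    System.out.println(model + " " + observation);                // 0.6 1.0
  }
}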

<p>
The graph also contains a cache of resolved resolvables with their native coverage, indexed by + * observable, so that the resolver can quickly assess if a previously used resolvable can be used + * for other resolutions before searching for models. */ public class ResolutionGraph { - private Resolvable target; - private Coverage targetCoverage; - private ContextScope rootScope; - private DefaultDirectedGraph graph = - new DefaultDirectedGraph<>(ResolutionEdge.class); - private ResolutionGraph parent; + private Resolvable target; + private Coverage targetCoverage; + private ContextScope rootScope; + private DefaultDirectedGraph graph = + new DefaultDirectedGraph<>(ResolutionEdge.class); + private ResolutionGraph parent; - // these are only used in the root graph. They collect the merged dependencies of all - // strategies and models, added only after the runtime has successfully resolved them. - private ResourceSet dependencies = new ResourceSet(); + // these are only used in the root graph. They collect the merged dependencies of all + // strategies and models, added only after the runtime has successfully resolved them. + private ResourceSet dependencies = new ResourceSet(); - /** - * A catalog per observable of all resolving sources seen, used by merging their native coverage with any - * resolving candidate before new strategies are attempted. Includes resolved observations, resolved - * models and (eventually) resolved external dataflows. - */ - private Map> resolutionCatalog = new HashMap<>(); - private boolean empty; - - private ResolutionGraph(ContextScope rootScope) { - this.rootScope = rootScope; - } - - public Graph graph() { - return this.graph; - } + /** + * A catalog per observable of all resolving sources seen, used by merging their native coverage + * with any resolving candidate before new strategies are attempted. Includes resolved + * observations, resolved models and (eventually) resolved external dataflows. + */ + private Map> resolutionCatalog = new HashMap<>(); - public double getResolvedCoverage() { - return targetCoverage == null ? 0 : targetCoverage.getCoverage(); - } + private boolean empty; - private ResolutionGraph(Resolvable target, Scale scaleToCover, ResolutionGraph parent) { + private ResolutionGraph(ContextScope rootScope) { + this.rootScope = rootScope; + } - if (parent.empty) { - throw new KlabIllegalStateException("cannot use an empty resolution graph"); - } + public Graph graph() { + return this.graph; + } - this.parent = parent; + public double getResolvedCoverage() { + return targetCoverage == null ? 0 : targetCoverage.getCoverage(); + } - /** - * Models are resolved from full down, intersecting the coverage of the dependencies. Everything - * else is resolved from zero up, uniting the coverages. - */ - this.target = target; - this.targetCoverage = Coverage.create(scaleToCover, target instanceof Model ? 
1.0 : 0.0); - var tc = getCoverage(target); - if (tc != null) { - this.targetCoverage = targetCoverage.merge(tc, LogicalConnector.INTERSECTION); - } + private ResolutionGraph(Resolvable target, Scale scaleToCover, ResolutionGraph parent) { - this.rootScope = parent.rootScope; - this.resolutionCatalog.putAll(parent.resolutionCatalog); + if (parent.empty) { + throw new KlabIllegalStateException("cannot use an empty resolution graph"); } - private Scale getCoverage(Resolvable target) { - return switch (target) { - case Model model -> Scale.create(model.getCoverage()); - case Observation observation -> Scale.create(observation.getGeometry()); - default -> null; - }; - } - - public ResourceSet getDependencies() { - return rootGraph().dependencies; - } - - private ResolutionGraph rootGraph() { - var ret = this; - while (ret.parent != null) { - ret = ret.parent; - } - return ret; - } + this.parent = parent; /** - * Merge the coverage and return the result without setting the coverage in the graph, just invoked for - * testing stop conditions. - * - * @param resolution - * @return + * Models are resolved from full down, intersecting the coverage of the dependencies. Everything + * else is resolved from zero up, uniting the coverages. */ - public Coverage checkCoverage(ResolutionGraph resolution) { - - if (resolution.isEmpty()) { - return Coverage.empty(); - } - return resolution.targetCoverage.merge(this.targetCoverage, target instanceof Model ? - LogicalConnector.INTERSECTION : - LogicalConnector.UNION); + this.target = target; + this.targetCoverage = Coverage.create(scaleToCover, target instanceof Model ? 1.0 : 0.0); + var tc = getCoverage(target); + if (tc != null) { + this.targetCoverage = targetCoverage.merge(tc, LogicalConnector.INTERSECTION); } - public boolean merge(ResolutionGraph childGraph) { - return merge(childGraph, null); + this.rootScope = parent.rootScope; + this.resolutionCatalog.putAll(parent.resolutionCatalog); + } + + private Scale getCoverage(Resolvable target) { + return switch (target) { + case Model model -> Scale.create(model.getCoverage()); + case Observation observation -> Scale.create(observation.getGeometry()); + default -> null; + }; + } + + public ResourceSet getDependencies() { + return rootGraph().dependencies; + } + + private ResolutionGraph rootGraph() { + var ret = this; + while (ret.parent != null) { + ret = ret.parent; } - - /** - * Accept the resolution contained in the passed graph for its target, adding all sub-resolvables - * collected along the way, then add our resolvable to the graph and updating the target coverage - * according to the kind of merge (uniting for alternative observables, intersecting for model - * dependencies) and the catalog. Return true if our own target coverage is made complete by the merge. - */ - public boolean merge(ResolutionGraph childGraph, String localName) { - - Graphs.addAllVertices(this.graph, childGraph.graph.vertexSet()); - Graphs.addAllEdges(this.graph, childGraph.graph, childGraph.graph.edgeSet()); - - /* - Our resolvable is resolved by the child's - */ - this.graph.addVertex(this.target); - this.graph.addVertex(childGraph.target); - this.graph.addEdge(this.target, childGraph.target, new ResolutionEdge(childGraph.targetCoverage, localName)); - - /* - ...by the amount determined in its coverage, "painting" the incoming extents onto ours. - */ - this.targetCoverage = this.targetCoverage.merge(childGraph.targetCoverage, - this.target instanceof Model ? 
LogicalConnector.INTERSECTION : LogicalConnector.UNION); - - /* - TODO UPDATE THE CATALOG WITH THE NATIVE COVERAGE OF THE TARGET - */ - - /* - if our coverage is satisfactory, signal that the merge has done all we need - */ - return targetCoverage.isComplete(); + return ret; + } + + /** + * Merge the coverage and return the result without setting the coverage in the graph, just + * invoked for testing stop conditions. + * + * @param resolution + * @return + */ + public Coverage checkCoverage(ResolutionGraph resolution) { + + if (resolution.isEmpty()) { + return Coverage.empty(); } - - /** - * Accept the resolution contained in this graph's objects, adding our resolvable to the graph and - * updating the catalog. Called directly on a parent graph when an existing resolvable is enough to - * resolve the target. + return resolution.targetCoverage.merge( + this.targetCoverage, + target instanceof Model ? LogicalConnector.INTERSECTION : LogicalConnector.UNION); + } + + public boolean merge(ResolutionGraph childGraph) { + return merge(childGraph, null); + } + + /** + * Accept the resolution contained in the passed graph for its target, adding all sub-resolvables + * collected along the way, then add our resolvable to the graph and updating the target coverage + * according to the kind of merge (uniting for alternative observables, intersecting for model + * dependencies) and the catalog. Return true if our own target coverage is made complete by the + * merge. + */ + public boolean merge(ResolutionGraph childGraph, String localName) { + + Graphs.addAllVertices(this.graph, childGraph.graph.vertexSet()); + Graphs.addAllEdges(this.graph, childGraph.graph, childGraph.graph.edgeSet()); + + /* + Our resolvable is resolved by the child's */ - public void accept(Resolvable resolvable, Coverage finalCoverage) { + this.graph.addVertex(this.target); + this.graph.addVertex(childGraph.target); + this.graph.addEdge( + this.target, childGraph.target, new ResolutionEdge(childGraph.targetCoverage, localName)); - // resolvable is in the graph already - - System.out.println("ACCEPTING EXISTING RESOLVABLE INTO SAME GRAPH FOR THIS RESOLVABLE - JUST CREATE" + - " THE LINK FROM THE TARGET TO THE RESOLVABLE AND SET THE COVERAGE"); - } - - public static ResolutionGraph create(ContextScope rootScope) { - return new ResolutionGraph(rootScope); - } - - public static ResolutionGraph empty() { - var ret = new ResolutionGraph(null); - ret.empty = true; - return ret; - } - - public boolean isEmpty() { - return empty; - } - - /** - * Spawn a new resolution graph to resolve the passed observation in the passed scale, as a child of the - * previous. - * - * @param target - * @param scaleToCover - * @return + /* + ...by the amount determined in its coverage, "painting" the incoming extents onto ours. */ - public ResolutionGraph createChild(Resolvable target, Scale scaleToCover) { - return new ResolutionGraph(target, scaleToCover, this); - } + this.targetCoverage = + this.targetCoverage.merge( + childGraph.targetCoverage, + this.target instanceof Model ? LogicalConnector.INTERSECTION : LogicalConnector.UNION); - /** - * Return any known resolvable (already present in the graph) that can resolve the passed observable, - * paired with the result of intersecting its native coverage with the passed scale. 
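getResolving below is still a stub returning an empty list; the description above and the resolutionCatalog field spell out the intent. A self-contained sketch of that lookup, keyed by plain observable strings and using fractions for coverage so it does not depend on the real Observable and Coverage types:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class ResolvableCatalog<R> {

  // Candidates per observable key, each paired with the fraction of the scale it covers natively.
  private final Map<String, List<Map.Entry<R, Double>>> catalog = new HashMap<>();

  void register(String observableKey, R resolvable, double nativeCoverage) {
    catalog.computeIfAbsent(observableKey, key -> new ArrayList<>())
        .add(Map.entry(resolvable, nativeCoverage));
  }

  // Returns previously seen resolvables whose usable contribution to the requested fraction of
  // the scale is worth merging (compare MINIMUM_WORTHWHILE_CONTRIBUTION in ResolutionCompiler).
  List<Map.Entry<R, Double>> getResolving(
      String observableKey, double requestedFraction, double minimumGain) {
    List<Map.Entry<R, Double>> ret = new ArrayList<>();
    for (var candidate : catalog.getOrDefault(observableKey, List.of())) {
      double contribution = Math.min(candidate.getValue(), requestedFraction);
      if (contribution >= minimumGain) {
        ret.add(Map.entry(candidate.getKey(), contribution));
      }
    }
    return ret;
  }
}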
- * - * @param observable - * @return + /* + TODO UPDATE THE CATALOG WITH THE NATIVE COVERAGE OF THE TARGET */ - public List> getResolving(Observable observable, Scale scale) { - return List.of(); - } - public Observation getContextObservation() { - ResolutionGraph target = this; - while (target != null && !(target.target instanceof Observation)) { - target = target.parent; - } - return target == null ? null : (Observation) target.target; + /* + if our coverage is satisfactory, signal that the merge has done all we need + */ + return targetCoverage.isComplete(); + } + + /** + * Accept the resolution contained in this graph's objects, adding our resolvable to the graph and + * updating the catalog. Called directly on a parent graph when an existing resolvable is enough + * to resolve the target. + */ + public void accept(Resolvable resolvable, Coverage finalCoverage) { + + // resolvable is in the graph already + + System.out.println( + "ACCEPTING EXISTING RESOLVABLE INTO SAME GRAPH FOR THIS RESOLVABLE - JUST CREATE" + + " THE LINK FROM THE TARGET TO THE RESOLVABLE AND SET THE COVERAGE"); + } + + public static ResolutionGraph create(ContextScope rootScope) { + return new ResolutionGraph(rootScope); + } + + public static ResolutionGraph empty() { + var ret = new ResolutionGraph(null); + ret.empty = true; + return ret; + } + + public boolean isEmpty() { + return empty; + } + + /** + * Spawn a new resolution graph to resolve the passed observation in the passed scale, as a child + * of the previous. + * + * @param target + * @param scaleToCover + * @return + */ + public ResolutionGraph createChild(Resolvable target, Scale scaleToCover) { + return new ResolutionGraph(target, scaleToCover, this); + } + + /** + * Return any known resolvable (already present in the graph) that can resolve the passed + * observable, paired with the result of intersecting its native coverage with the passed scale. + * + * @param observable + * @return + */ + public List> getResolving(Observable observable, Scale scale) { + return List.of(); + } + + public Observation getContextObservation() { + ResolutionGraph target = this; + while (target != null && !(target.target instanceof Observation)) { + target = target.parent; } - - public void setDependencies(ResourceSet dependencies) { - rootGraph().dependencies = dependencies; + return target == null ? null : (Observation) target.target; + } + + public void setDependencies(ResourceSet dependencies) { + rootGraph().dependencies = dependencies; + } + + public List rootNodes() { + List ret = new ArrayList<>(); + for (Resolvable l : graph().vertexSet()) { + if (graph.incomingEdgesOf(l).isEmpty()) { + ret.add(l); + } } + return ret; + } - public List rootNodes() { - List ret = new ArrayList<>(); - for (Resolvable l : graph().vertexSet()) { - if (graph.incomingEdgesOf(l).isEmpty()) { - ret.add(l); - } - } - return ret; - } + /** + * The RESOLVED_BY edge, only including the resolution coverage for now. Each resolvable may have + * >1 resolving nodes, successively covering the extents up to "sufficient" coverage. + * + *

TODO must add the local "name" for the resolving object if one is needed + */ + public static class ResolutionEdge extends DefaultEdge { - /** - * The RESOLVED_BY edge, only including the resolution coverage for now. Each resolvable may - * have >1 resolving nodes, successively covering the extents up to "sufficient" coverage. - * - * TODO must add the local "name" for the resolving object if one is needed - */ - public static class ResolutionEdge extends DefaultEdge { + public Coverage coverage; + public String localName; - public Coverage coverage; - public String localName; + public ResolutionEdge() {} - public ResolutionEdge() { - } - - public ResolutionEdge(Coverage coverage, String localName) { - this.coverage = coverage; - } + public ResolutionEdge(Coverage coverage, String localName) { + this.coverage = coverage; } - + } } diff --git a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolverService.java b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolverService.java index be68e9f24..c69f6d6b0 100644 --- a/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolverService.java +++ b/klab.services.resolver/src/main/java/org/integratedmodelling/klab/services/resolver/ResolverService.java @@ -46,363 +46,387 @@ public class ResolverService extends BaseService implements Resolver { - private static final String RESOLUTION_GRAPH_KEY = "__RESOLUTION_GRAPH__"; - /** - * FIXME this should be modifiable at the scope level - */ - private static double MINIMUM_WORTHWHILE_CONTRIBUTION = 0.15; - - private final String hardwareSignature = Utils.Names.getHardwareId(); - private ResolverConfiguration configuration; - private final ResolutionCompiler resolutionCompiler = new ResolutionCompiler(this); - - public ResolverService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { - super(scope, Type.RESOLVER, options); - // setProvideScopesAutomatically(true); - ServiceConfiguration.INSTANCE.setMainService(this); - readConfiguration(options); - // // FIXME switch this to use the ingest() mechanism - // KnowledgeRepository.INSTANCE.setProcessor(KlabAsset.KnowledgeClass.NAMESPACE, - // (ns) -> loadNamespace((KimNamespace) ns, scope)); + private static final String RESOLUTION_GRAPH_KEY = "__RESOLUTION_GRAPH__"; + + /** FIXME this should be modifiable at the scope level */ + private static double MINIMUM_WORTHWHILE_CONTRIBUTION = 0.15; + + private final String hardwareSignature = Utils.Names.getHardwareId(); + private ResolverConfiguration configuration; + private final ResolutionCompiler resolutionCompiler = new ResolutionCompiler(this); + + public ResolverService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { + super(scope, Type.RESOLVER, options); + // setProvideScopesAutomatically(true); + ServiceConfiguration.INSTANCE.setMainService(this); + readConfiguration(options); + // // FIXME switch this to use the ingest() mechanism + // KnowledgeRepository.INSTANCE.setProcessor(KlabAsset.KnowledgeClass.NAMESPACE, + // (ns) -> loadNamespace((KimNamespace) ns, scope)); + } + + @Override + protected List ingestResources( + ResourceSet resourceSet, Scope scope, Class resultClass) { + + List ret = new ArrayList<>(); + + final Function> namespaceTranslator = + (namespace) -> { + if (namespace instanceof KimNamespace kimNamespace) { + List mods = new ArrayList<>(); + kimNamespace.getStatements().stream() + .filter(statement -> statement instanceof KimModel 
kimModel) + .forEach(kimModel -> mods.add(loadModel((KimModel) kimModel, scope))); + return mods; + } + return List.of(); + }; + + return KnowledgeRepository.INSTANCE.ingest( + resourceSet, + scope, + resultClass, + Pair.of(KlabAsset.KnowledgeClass.NAMESPACE, namespaceTranslator)); + } + + private void readConfiguration(ServiceStartupOptions options) { + File config = BaseService.getFileInConfigurationDirectory(options, "resolver.yaml"); + if (config.exists() && config.length() > 0 && !options.isClean()) { + this.configuration = Utils.YAML.load(config, ResolverConfiguration.class); + } else { + // make an empty config + this.configuration = new ResolverConfiguration(); + this.configuration.setServiceId(UUID.randomUUID().toString()); + // TODO anything else we need + saveConfiguration(); + } + } + + private void saveConfiguration() { + File config = BaseService.getFileInConfigurationDirectory(startupOptions, "resolver.yaml"); + org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + } + + @Override + public boolean shutdown() { + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceUnavailable, + capabilities(serviceScope())); + + // TODO Auto-generated method stub + return super.shutdown(); + } + + @Override + public Capabilities capabilities(Scope scope) { + + var ret = new ResolverCapabilitiesImpl(); + ret.setLocalName(localName); + ret.setType(Type.RESOLVER); + ret.setUrl(getUrl()); + ret.setServerId(hardwareSignature == null ? null : ("RESOLVER_" + hardwareSignature)); + ret.setServiceId(configuration.getServiceId()); + ret.setServiceName("Resolver"); + ret.setBrokerURI( + (embeddedBroker != null && embeddedBroker.isOnline()) + ? embeddedBroker.getURI() + : configuration.getBrokerURI()); + ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); + ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); + + ret.setAvailableMessagingQueues( + Utils.URLs.isLocalHost(getUrl()) + ? 
EnumSet.of(Message.Queue.Info, Message.Queue.Errors, Message.Queue.Warnings) + : EnumSet.noneOf(Message.Queue.class)); + return ret; + } + + @Override + public Dataflow resolve(Observation observation, ContextScope contextScope) { + var ret = resolutionCompiler.resolve(observation, contextScope); + if (!ret.isEmpty()) { + return new DataflowCompiler(observation, ret, contextScope).compile(); + } + return Dataflow.empty(Observation.class); + } + + @Override + public String serviceId() { + return configuration.getServiceId(); + } + + // private void loadNamespace(KimNamespace namespace, Scope scope) { + // + // List ret = new ArrayList<>(); + // for (KlabStatement statement : namespace.getStatements()) { + // if (statement instanceof KimModel kimModel) { + // var model = loadModel(kimModel, scope); + // KnowledgeRepository.INSTANCE.registerAsset(model.getUrn(), model, namespace + // .getVersion()); + // } + // } + // } + + private Model loadModel(KimModel statement, Scope scope) { + + var reasoner = scope.getService(Reasoner.class); + + ModelImpl model = new ModelImpl(); + model.getAnnotations().addAll(statement.getAnnotations()); // FIXME process annotations + for (KimObservable observable : statement.getObservables()) { + model.getObservables().add(reasoner.declareObservable(observable)); + } + for (KimObservable observable : statement.getDependencies()) { + model.getDependencies().add(reasoner.declareObservable(observable)); } - @Override - protected List ingestResources(ResourceSet resourceSet, Scope scope, - Class resultClass) { - - List ret = new ArrayList<>(); - - final Function> namespaceTranslator = - (namespace) -> { - if (namespace instanceof KimNamespace kimNamespace) { - List mods = new ArrayList<>(); - kimNamespace.getStatements().stream().filter(statement -> statement instanceof KimModel kimModel).forEach(kimModel -> mods.add(loadModel((KimModel) kimModel, scope))); - return mods; - } - return List.of(); - }; + // TODO learners, geometry covered etc. 
+ model.setUrn(statement.getUrn()); + model.setMetadata( + statement.getMetadata()); // FIXME add processed metadata with the existing symbol table + model.setNamespace(statement.getNamespace()); + model.setProjectName(statement.getProjectName()); - return KnowledgeRepository.INSTANCE.ingest(resourceSet, scope, resultClass, Pair.of(KlabAsset.KnowledgeClass.NAMESPACE, namespaceTranslator)); - } + // TODO any literal value must be added first - private void readConfiguration(ServiceStartupOptions options) { - File config = BaseService.getFileInConfigurationDirectory(options, "resolver.yaml"); - if (config.exists() && config.length() > 0 && !options.isClean()) { - this.configuration = Utils.YAML.load(config, ResolverConfiguration.class); - } else { - // make an empty config - this.configuration = new ResolverConfiguration(); - this.configuration.setServiceId(UUID.randomUUID().toString()); - // TODO anything else we need - saveConfiguration(); - } + for (var resourceUrn : statement.getResourceUrns()) { + // FIXME when >1 this should be one multi-resource contextualizable + // TODO use static builders instead of polymorphic constructors + model.getComputation().add(new ContextualizableImpl(resourceUrn)); } + model.getComputation().addAll(statement.getContextualization()); - private void saveConfiguration() { - File config = BaseService.getFileInConfigurationDirectory(startupOptions, "resolver.yaml"); - org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); - } + // FIXME use coverage from NS or model if any + model.setCoverage(Coverage.universal()); - @Override - public boolean shutdown() { + return model; + } - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceUnavailable, - capabilities(serviceScope())); + @Override + public boolean scopesAreReactive() { + return false; + } - // TODO Auto-generated method stub - return super.shutdown(); - } + @Override + public void initializeService() { - @Override - public Capabilities capabilities(Scope scope) { - - var ret = new ResolverCapabilitiesImpl(); - ret.setLocalName(localName); - ret.setType(Type.RESOLVER); - ret.setUrl(getUrl()); - ret.setServerId(hardwareSignature == null ? null : ("RESOLVER_" + hardwareSignature)); - ret.setServiceId(configuration.getServiceId()); - ret.setServiceName("Resolver"); - ret.setBrokerURI((embeddedBroker != null && embeddedBroker.isOnline()) ? embeddedBroker.getURI() : - configuration.getBrokerURI()); - ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); - ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); - - ret.setAvailableMessagingQueues(Utils.URLs.isLocalHost(getUrl()) ? 
- EnumSet.of(Message.Queue.Info, Message.Queue.Errors, - Message.Queue.Warnings) : - EnumSet.noneOf(Message.Queue.class)); - return ret; - } + Logging.INSTANCE.setSystemIdentifier("Resolver service: "); - @Override - public Dataflow resolve(Observation observation, ContextScope contextScope) { - var ret = resolutionCompiler.resolve(observation, contextScope); - if (!ret.isEmpty()) { - return new DataflowCompiler(observation, ret, contextScope).compile(); - } - return Dataflow.empty(Observation.class); - } + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceInitializing, + capabilities(serviceScope()).toString()); - @Override - public String serviceId() { - return configuration.getServiceId(); - } + /* + * Components + */ + Set extensionPackages = new LinkedHashSet<>(); + extensionPackages.add("org.integratedmodelling.klab.runtime"); + /* + * Check for updates, load and scan all new plug-ins, returning the main packages to scan + */ + // FIXME update paths and simplify, put in BaseService + // extensionPackages.addAll(Configuration.INSTANCE.updateAndLoadComponents("resolver")); - // private void loadNamespace(KimNamespace namespace, Scope scope) { - // - // List ret = new ArrayList<>(); - // for (KlabStatement statement : namespace.getStatements()) { - // if (statement instanceof KimModel kimModel) { - // var model = loadModel(kimModel, scope); - // KnowledgeRepository.INSTANCE.registerAsset(model.getUrn(), model, namespace - // .getVersion()); - // } - // } - // } - - private Model loadModel(KimModel statement, Scope scope) { - - var reasoner = scope.getService(Reasoner.class); - - ModelImpl model = new ModelImpl(); - model.getAnnotations().addAll(statement.getAnnotations()); // FIXME process annotations - for (KimObservable observable : statement.getObservables()) { - model.getObservables().add(reasoner.declareObservable(observable)); - } - for (KimObservable observable : statement.getDependencies()) { - model.getDependencies().add(reasoner.declareObservable(observable)); - } - - // TODO learners, geometry covered etc. - model.setUrn(statement.getUrn()); - model.setMetadata( - statement.getMetadata()); // FIXME add processed metadata with the existing symbol table - model.setNamespace(statement.getNamespace()); - model.setProjectName(statement.getProjectName()); - - // TODO any literal value must be added first - - for (var resourceUrn : statement.getResourceUrns()) { - // FIXME when >1 this should be one multi-resource contextualizable - // TODO use static builders instead of polymorphic constructors - model.getComputation().add(new ContextualizableImpl(resourceUrn)); - } - model.getComputation().addAll(statement.getContextualization()); - - // FIXME use coverage from NS or model if any - model.setCoverage(Coverage.universal()); - - return model; - } + /* + * Scan all packages registered under the parent package of all k.LAB services. TODO all + * assets from there should be given default permissions (or those encoded with their + * annotations) that are exposed to the admin API. + */ + getComponentRegistry().loadExtensions(extensionPackages.toArray(new String[] {})); - @Override - public boolean scopesAreReactive() { - return false; + /** + * Setup an embedded broker, possibly to be shared with other services, if we're local and there + * is no configured broker. 
+ */ + if (Utils.URLs.isLocalHost(this.getUrl()) && this.configuration.getBrokerURI() == null) { + this.embeddedBroker = new EmbeddedBroker(); } - @Override - public void initializeService() { - - Logging.INSTANCE.setSystemIdentifier("Resolver service: "); - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceInitializing, - capabilities(serviceScope()).toString()); - - /* - * Components - */ - Set extensionPackages = new LinkedHashSet<>(); - extensionPackages.add("org.integratedmodelling.klab.runtime"); - /* - * Check for updates, load and scan all new plug-ins, returning the main packages to scan - */ - // FIXME update paths and simplify, put in BaseService - // extensionPackages.addAll(Configuration.INSTANCE.updateAndLoadComponents("resolver")); - - /* - * Scan all packages registered under the parent package of all k.LAB services. TODO all - * assets from there should be given default permissions (or those encoded with their - * annotations) that are exposed to the admin API. - */ - getComponentRegistry().loadExtensions(extensionPackages.toArray(new String[]{})); - - /** - * Setup an embedded broker, possibly to be shared with other services, if we're local and there - * is no configured broker. - */ - if (Utils.URLs.isLocalHost(this.getUrl()) && this.configuration.getBrokerURI() == null) { - this.embeddedBroker = new EmbeddedBroker(); - } - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceAvailable, - capabilities(serviceScope())); + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceAvailable, + capabilities(serviceScope())); + } - } + @Override + public boolean operationalizeService() { + return true; + } - @Override - public boolean operationalizeService() { - return true; - } + @Override + public String encodeDataflow(Dataflow dataflow) { - @Override - public String encodeDataflow(Dataflow dataflow) { - - StringBuilder kdl = new StringBuilder(1024); - - Map resources = new HashMap<>(); - for (Actuator actuator : dataflow.getComputation()) { - kdl.append("\n"); - kdl.append(encodeActuator(actuator, 0, resources)); - } - - StringBuilder ret = new StringBuilder(2048); - ret.append(encodePreamble(dataflow)); - ret.append("\n"); - var res = encodeResources(dataflow, resources); - if (!res.isEmpty()) { - ret.append(res); - ret.append("\n"); - } - ret.append(kdl); - - // if (offset == 0 && parentActuator == null) { - // ret += "@klab " + Version.CURRENT + "\n"; - // ret += "@author 'k.LAB resolver " + creationTime + "'" + "\n"; - // // TODO should encode coverage after the resolver. - // // if (coverage != null && coverage.getExtentCount() > 0) { - // // List scaleSpecs = ((Scale) coverage).getKimSpecification(); - // // if (!scaleSpecs.isEmpty()) { - // // ret += "@coverage load_me_from_some_sidecar_file()"; - // // ret += "\n"; - // // } - // // } - // ret += "\n"; - // } - // - // Pair> structure = getResolutionStructure(); - // - // if (structure == null) { - // for (IActuator actuator : actuators) { - // ret += ((Actuator) actuator).encode(offset, null) + "\n"; - // } - // return ret; - // } - // - // return ret + ((Actuator) structure.getFirst()).encode(0, - // structure.getSecond().isEmpty() ? 
(List) null : structure.getSecond()); - return ret.toString(); - } + StringBuilder kdl = new StringBuilder(1024); - private StringBuffer encodeResources(Dataflow dataflow, Map resources) { - StringBuffer ret = new StringBuffer(1024); - // TODO - return ret; + Map resources = new HashMap<>(); + for (Actuator actuator : dataflow.getComputation()) { + kdl.append("\n"); + kdl.append(encodeActuator(actuator, 0, resources)); } - private StringBuffer encodePreamble(Dataflow dataflow) { - StringBuffer ret = new StringBuffer(1024); - ret.append("@klab " + Version.CURRENT + "\n"); - ret.append("@author 'k.LAB resolver " + TimeInstant.create().toRFC3339String() + "'" + "\n"); - return ret; + StringBuilder ret = new StringBuilder(2048); + ret.append(encodePreamble(dataflow)); + ret.append("\n"); + var res = encodeResources(dataflow, resources); + if (!res.isEmpty()) { + ret.append(res); + ret.append("\n"); } - - private StringBuffer encodeActuator(Actuator actuator, int offset, Map resources) { - String ofs = org.integratedmodelling.common.utils.Utils.Strings.spaces(offset); - StringBuffer ret = new StringBuffer(1024); - - ret.append(ofs + actuator.getObservable().getDescriptionType().getKdlType() + " " - + actuator.getObservable().getReferenceName() + " (\n"); - - for (Actuator child : actuator.getChildren()) { - ret.append(encodeActuator(child, offset + 2, resources)); - } - - boolean done = false; - for (ServiceCall contextualizable : actuator.getComputation()) { - if (!done) { - ret.append(ofs + ofs + "compute\n"); - } - ret.append(encodeServiceCall(contextualizable, offset + 6, resources) + "\n"); - done = true; - } - - /* - * ? coverage - */ - - // ret.append(ofs + ")" + (actuator.getAlias() == null ? "" : (" named " + actuator.getAlias - // ())) - // + (actuator.getObservable().getObserver() == null - // ? "" - // : (" as " + actuator.getObservable().getObserver().getName())) - // + "\n"); - - return ret; + ret.append(kdl); + + // if (offset == 0 && parentActuator == null) { + // ret += "@klab " + Version.CURRENT + "\n"; + // ret += "@author 'k.LAB resolver " + creationTime + "'" + "\n"; + // // TODO should encode coverage after the resolver. + // // if (coverage != null && coverage.getExtentCount() > 0) { + // // List scaleSpecs = ((Scale) coverage).getKimSpecification(); + // // if (!scaleSpecs.isEmpty()) { + // // ret += "@coverage load_me_from_some_sidecar_file()"; + // // ret += "\n"; + // // } + // // } + // ret += "\n"; + // } + // + // Pair> structure = getResolutionStructure(); + // + // if (structure == null) { + // for (IActuator actuator : actuators) { + // ret += ((Actuator) actuator).encode(offset, null) + "\n"; + // } + // return ret; + // } + // + // return ret + ((Actuator) structure.getFirst()).encode(0, + // structure.getSecond().isEmpty() ? 
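+    // Illustrative only: with the encode* helpers below, the text produced for a dataflow with a
+    // single actuator comes out roughly as
+    //
+    //   @klab <Version.CURRENT>
+    //   @author 'k.LAB resolver <RFC3339 timestamp>'
+    //
+    //   <kdl description type> <observable reference name> (
+    //       compute
+    //         <service call encoded in k.Observation syntax>
+    //
+    // The closing parenthesis, the coverage and the resource variables are still commented out or
+    // TODO in this version of the encoder.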
(List) null : structure.getSecond()); + return ret.toString(); + } + + private StringBuffer encodeResources( + Dataflow dataflow, Map resources) { + StringBuffer ret = new StringBuffer(1024); + // TODO + return ret; + } + + private StringBuffer encodePreamble(Dataflow dataflow) { + StringBuffer ret = new StringBuffer(1024); + ret.append("@klab " + Version.CURRENT + "\n"); + ret.append("@author 'k.LAB resolver " + TimeInstant.create().toRFC3339String() + "'" + "\n"); + return ret; + } + + private StringBuffer encodeActuator( + Actuator actuator, int offset, Map resources) { + String ofs = org.integratedmodelling.common.utils.Utils.Strings.spaces(offset); + StringBuffer ret = new StringBuffer(1024); + + ret.append( + ofs + + actuator.getObservable().getDescriptionType().getKdlType() + + " " + + actuator.getObservable().getReferenceName() + + " (\n"); + + for (Actuator child : actuator.getChildren()) { + ret.append(encodeActuator(child, offset + 2, resources)); } - private String encodeServiceCall(ServiceCall contextualizable, int offset, - Map resources) { - // TODO extract resource parameters and substitute with variables - return org.integratedmodelling.common.utils.Utils.Strings.spaces(offset) + contextualizable.encode( - Language.KOBSERVATION); + boolean done = false; + for (ServiceCall contextualizable : actuator.getComputation()) { + if (!done) { + ret.append(ofs + ofs + "compute\n"); + } + ret.append(encodeServiceCall(contextualizable, offset + 6, resources) + "\n"); + done = true; } - /** - * Replicate a remote scope in the scope manager. This should be called by the runtime service after - * creating it so if the scope has no ID we issue an error, as we do not create independent scopes. - * - * @param sessionScope a client scope that should record the ID for future communication. If the ID is - * null, the call has failed. - * @return + /* + * ? coverage */ - @Override - public String registerSession(SessionScope sessionScope) { - - if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { - - if (sessionScope.getId() == null) { - throw new KlabIllegalArgumentException("resolver: session scope has no ID, cannot register " + - "a scope autonomously"); - } - getScopeManager().registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); - return serviceSessionScope.getId(); - } - - throw new KlabIllegalArgumentException("unexpected scope class"); + // ret.append(ofs + ")" + (actuator.getAlias() == null ? "" : (" named " + + // actuator.getAlias + // ())) + // + (actuator.getObservable().getObserver() == null + // ? "" + // : (" as " + actuator.getObservable().getObserver().getName())) + // + "\n"); + + return ret; + } + + private String encodeServiceCall( + ServiceCall contextualizable, int offset, Map resources) { + // TODO extract resource parameters and substitute with variables + return org.integratedmodelling.common.utils.Utils.Strings.spaces(offset) + + contextualizable.encode(Language.KOBSERVATION); + } + + /** + * Replicate a remote scope in the scope manager. This should be called by the runtime service + * after creating it so if the scope has no ID we issue an error, as we do not create independent + * scopes. + * + * @param sessionScope a client scope that should record the ID for future communication. If the + * ID is null, the call has failed. 
+   * @return the registered session scope ID
+   */
+  @Override
+  public String registerSession(SessionScope sessionScope) {
+
+    if (sessionScope instanceof ServiceSessionScope serviceSessionScope) {
+
+      if (sessionScope.getId() == null) {
+        throw new KlabIllegalArgumentException(
+            "resolver: session scope has no ID, cannot register a scope autonomously");
+      }
+
+      getScopeManager()
+          .registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI());
+      return serviceSessionScope.getId();
+    }
+
+    throw new KlabIllegalArgumentException("unexpected scope class");
+  }
+
+  /**
+   * Replicate a remote scope in the scope manager. This should be called by the runtime service
+   * after creating the scope; if the scope has no ID we throw an exception, as we do not create
+   * independent scopes.
+   *
+   * @param contextScope a client scope that should record the ID for future communication. If the
+   *     ID is null, the call has failed.
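+   *
+   *     <p>Illustrative sketch of the expected handshake (the caller side is hypothetical; only
+   *     the methods of this class are real): the runtime creates the scope, assigns its ID and
+   *     then registers it here, after which the resolution graph is available through the scope
+   *     data:
+   *     <pre>{@code
+   *     String id = resolver.registerContext(contextScope);
+   *     ResolutionGraph graph = ResolverService.getResolutionGraph(contextScope);
+   *     }</pre>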
+ * @return + */ + @Override + public String registerContext(ContextScope contextScope) { + + contextScope.getData().put(RESOLUTION_GRAPH_KEY, ResolutionGraph.create(contextScope)); + + if (contextScope instanceof ServiceContextScope serviceContextScope) { + + if (contextScope.getId() == null) { + throw new KlabIllegalArgumentException( + "resolver: context scope has no ID, cannot register " + "a scope autonomously"); + } + + getScopeManager() + .registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); + return serviceContextScope.getId(); } - public static ResolutionGraph getResolutionGraph(ContextScope scope) { - return scope.getData().get(RESOLUTION_GRAPH_KEY, ResolutionGraph.class); - } + throw new KlabIllegalArgumentException("unexpected scope class"); + } + public static ResolutionGraph getResolutionGraph(ContextScope scope) { + return scope.getData().get(RESOLUTION_GRAPH_KEY, ResolutionGraph.class); + } } diff --git a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceAdminController.java b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceAdminController.java index 374ef9ef2..fb9d25878 100644 --- a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceAdminController.java +++ b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceAdminController.java @@ -39,180 +39,201 @@ @Tag(name = "Resources service administration API") public class ResourceAdminController { - @Autowired - private ResourcesServer resourcesServer; - - @Autowired - private ServiceAuthorizationManager authenticationManager; - -// @PostMapping(ServicesAPI.RESOURCES.ADMIN.IMPORT_PROJECT) -// public @ResponseBody List importNewProject(@RequestBody ProjectRequest request, -// Principal principal) { -// if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { -// return admin.importProject(request.getWorkspaceName(), request.getProjectUrl(), -// request.isOverwrite(), principal instanceof EngineAuthorization authorization ? -// authorization.getScope(UserScope.class) : null); -// } -// throw new KlabInternalErrorException("Resources service is incapable of admin operation"); -// } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_PROJECT) - public @ResponseBody ResourceSet createNewProject(@PathVariable("workspaceName") String workspaceName, - @PathVariable( - "projectName") String projectName, - Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { - return admin.createProject(workspaceName, projectName, - principal instanceof EngineAuthorization authorization ? - authorization.getScope(UserScope.class) : null); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + @Autowired private ResourcesServer resourcesServer; + + @Autowired private ServiceAuthorizationManager authenticationManager; + + // @PostMapping(ServicesAPI.RESOURCES.ADMIN.IMPORT_PROJECT) + // public @ResponseBody List importNewProject(@RequestBody ProjectRequest request, + // Principal principal) { + // if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + // return admin.importProject(request.getWorkspaceName(), request.getProjectUrl(), + // request.isOverwrite(), principal instanceof EngineAuthorization + // authorization ? 
+ // authorization.getScope(UserScope.class) : null); + // } + // throw new KlabInternalErrorException("Resources service is incapable of admin + // operation"); + // } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_PROJECT) + public @ResponseBody ResourceSet createNewProject( + @PathVariable("workspaceName") String workspaceName, + @PathVariable("projectName") String projectName, + Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + return admin.createProject( + workspaceName, + projectName, + principal instanceof EngineAuthorization authorization + ? authorization.getScope(UserScope.class) + : null); } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPDATE_PROJECT) - public @ResponseBody ResourceSet updateExistingProject(@PathVariable("projectName") String projectName, - @RequestBody Project.Manifest manifest, - @RequestBody Metadata metadata, - Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - return admin.updateProject(projectName, manifest, metadata, - auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPDATE_PROJECT) + public @ResponseBody ResourceSet updateExistingProject( + @PathVariable("projectName") String projectName, + @RequestBody Project.Manifest manifest, + @RequestBody Metadata metadata, + Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + return admin.updateProject(projectName, manifest, metadata, auth.getScope(UserScope.class)); } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_DOCUMENT) - public List createDocument(@PathVariable("projectName") String projectName, - @PathVariable("documentType") ProjectStorage.ResourceType documentType, - @PathVariable("urn") String urn, Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - return admin.createDocument(projectName, urn, documentType, - auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_DOCUMENT) + public List createDocument( + @PathVariable("projectName") String projectName, + @PathVariable("documentType") ProjectStorage.ResourceType documentType, + @PathVariable("urn") String urn, + Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + return admin.createDocument(projectName, urn, documentType, auth.getScope(UserScope.class)); } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPDATE_DOCUMENT) - public List updateOntology(@PathVariable("projectName") String projectName, - @PathVariable("documentType") ProjectStorage.ResourceType documentType, - @RequestBody String content, Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - return admin.updateDocument(projectName, documentType, content, - auth.getScope(UserScope.class)); - } - 
throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPDATE_DOCUMENT) + public List updateOntology( + @PathVariable("projectName") String projectName, + @PathVariable("documentType") ProjectStorage.ResourceType documentType, + @RequestBody String content, + Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + return admin.updateDocument( + projectName, documentType, content, auth.getScope(UserScope.class)); } - -// @PostMapping(ServicesAPI.RESOURCES.ADMIN.IMPORT_RESOURCE) -// public @ResponseBody ResourceSet createResource(@RequestBody Resource resource, Principal principal) { -// if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { -// var urn = admin.createResource(resource, -// principal instanceof EngineAuthorization authorization ? -// authorization.getScope(UserScope.class) : null); -// return null; // TODO create ResourceSet -// } -// throw new KlabInternalErrorException("Resources service is incapable of admin operation"); -// } -// -// @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPLOAD_RESOURCE) -// public @ResponseBody ResourceSet createResourceFromPath(@RequestParam("file") MultipartFile file, -// Principal principal) { -// -// ResourceSet ret = null; -// -// if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { -// var scope = auth.getScope(UserScope.class); -// try { -// File tempDir = Files.createTempDirectory("klab").toFile(); -// File resourcePath = new File(tempDir + File.separator + file.getOriginalFilename()); -// FileUtils.copyInputStreamToFile(file.getInputStream(), resourcePath); -// ret = admin.createResource(resourcePath, scope); -// tempDir.deleteOnExit(); -// } catch (IOException e) { -// scope.error(e); -// } -// -// return ret; -// } -// throw new KlabInternalErrorException("Resources service is incapable of admin operation"); -// } - -// @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_RESOURCE) -// public Resource createResourceForProject(@RequestParam("projectName") String projectName, -// @RequestParam("urnId") String urnId, -// @RequestParam("adapter") String adapter, -// @RequestBody Parameters resourceData, -// Principal principal) { -// if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { -// return admin.createResource(projectName, urnId, adapter, resourceData, -// principal instanceof EngineAuthorization authorization ? 
-// authorization.getScope(UserScope.class) : null); -// } -// throw new KlabInternalErrorException("Resources service is incapable of admin operation"); -// } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.REMOVE_PROJECT) - public List removeProject(@RequestParam("projectName") String projectName, - Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - admin.deleteProject(projectName, auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + // @PostMapping(ServicesAPI.RESOURCES.ADMIN.IMPORT_RESOURCE) + // public @ResponseBody ResourceSet createResource(@RequestBody Resource resource, Principal + // principal) { + // if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + // var urn = admin.createResource(resource, + // principal instanceof EngineAuthorization authorization ? + // authorization.getScope(UserScope.class) : null); + // return null; // TODO create ResourceSet + // } + // throw new KlabInternalErrorException("Resources service is incapable of admin + // operation"); + // } + // + // @PostMapping(ServicesAPI.RESOURCES.ADMIN.UPLOAD_RESOURCE) + // public @ResponseBody ResourceSet createResourceFromPath(@RequestParam("file") MultipartFile + // file, + // Principal principal) { + // + // ResourceSet ret = null; + // + // if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal + // instanceof EngineAuthorization auth) { + // var scope = auth.getScope(UserScope.class); + // try { + // File tempDir = Files.createTempDirectory("klab").toFile(); + // File resourcePath = new File(tempDir + File.separator + + // file.getOriginalFilename()); + // FileUtils.copyInputStreamToFile(file.getInputStream(), resourcePath); + // ret = admin.createResource(resourcePath, scope); + // tempDir.deleteOnExit(); + // } catch (IOException e) { + // scope.error(e); + // } + // + // return ret; + // } + // throw new KlabInternalErrorException("Resources service is incapable of admin + // operation"); + // } + + // @PostMapping(ServicesAPI.RESOURCES.ADMIN.CREATE_RESOURCE) + // public Resource createResourceForProject(@RequestParam("projectName") String projectName, + // @RequestParam("urnId") String urnId, + // @RequestParam("adapter") String adapter, + // @RequestBody Parameters resourceData, + // Principal principal) { + // if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + // return admin.createResource(projectName, urnId, adapter, resourceData, + // principal instanceof EngineAuthorization authorization ? 
+ // authorization.getScope(UserScope.class) : null); + // } + // throw new KlabInternalErrorException("Resources service is incapable of admin + // operation"); + // } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.REMOVE_PROJECT) + public List removeProject( + @RequestParam("projectName") String projectName, Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + admin.deleteProject(projectName, auth.getScope(UserScope.class)); } - - @PostMapping(ServicesAPI.RESOURCES.ADMIN.REMOVE_WORKSPACE) - public List removeWorkspace(@RequestParam("workspaceName") String workspaceName, - Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - admin.deleteWorkspace(workspaceName, auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @PostMapping(ServicesAPI.RESOURCES.ADMIN.REMOVE_WORKSPACE) + public List removeWorkspace( + @RequestParam("workspaceName") String workspaceName, Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + admin.deleteWorkspace(workspaceName, auth.getScope(UserScope.class)); } - - @GetMapping(value = ServicesAPI.RESOURCES.LIST_PROJECTS, produces = MediaType.APPLICATION_JSON_VALUE) - public @ResponseBody Collection listProjects(Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { - return admin.listProjects(principal instanceof EngineAuthorization authorization ? - authorization.getScope() : null); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @GetMapping( + value = ServicesAPI.RESOURCES.LIST_PROJECTS, + produces = MediaType.APPLICATION_JSON_VALUE) + public @ResponseBody Collection listProjects(Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + return admin.listProjects( + principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); } - - @GetMapping(value = ServicesAPI.RESOURCES.LIST_RESOURCE_URNS, produces = MediaType.APPLICATION_JSON_VALUE) - public @ResponseBody Collection listResourceUrns(Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { - return admin.listResourceUrns(principal instanceof EngineAuthorization authorization ? - authorization.getScope() : null); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @GetMapping( + value = ServicesAPI.RESOURCES.LIST_RESOURCE_URNS, + produces = MediaType.APPLICATION_JSON_VALUE) + public @ResponseBody Collection listResourceUrns(Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + return admin.listResourceUrns( + principal instanceof EngineAuthorization authorization ? 
authorization.getScope() : null); } - - @GetMapping(value = ServicesAPI.RESOURCES.ADMIN.LOCK_PROJECT) - public URL lockProject(@PathVariable("urn") String urn, Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - return admin.lockProject(urn, auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @GetMapping(value = ServicesAPI.RESOURCES.ADMIN.LOCK_PROJECT) + public URL lockProject(@PathVariable("urn") String urn, Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + return admin.lockProject(urn, auth.getScope(UserScope.class)); } - - @GetMapping(value = ServicesAPI.RESOURCES.ADMIN.UNLOCK_PROJECT) - public boolean unlockProject(@PathVariable("urn") String urn, Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin && principal instanceof EngineAuthorization auth) { - return admin.unlockProject(urn, auth.getScope(UserScope.class)); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @GetMapping(value = ServicesAPI.RESOURCES.ADMIN.UNLOCK_PROJECT) + public boolean unlockProject(@PathVariable("urn") String urn, Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin + && principal instanceof EngineAuthorization auth) { + return admin.unlockProject(urn, auth.getScope(UserScope.class)); } - - @PostMapping(value = ServicesAPI.RESOURCES.ADMIN.MANAGE_PROJECT) - public List manageProject(@PathVariable("urn") String urn, - @RequestBody ProjectRequest request, - Principal principal) { - if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { - return admin.manageRepository(urn, request.getOperation(), - request.getParameters().toArray(new String[]{})); - } - throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } + + @PostMapping(value = ServicesAPI.RESOURCES.ADMIN.MANAGE_PROJECT) + public List manageProject( + @PathVariable("urn") String urn, @RequestBody ProjectRequest request, Principal principal) { + if (resourcesServer.klabService() instanceof ResourcesService.Admin admin) { + return admin.manageRepository( + urn, request.getOperation(), request.getParameters().toArray(new String[] {})); } - - + throw new KlabInternalErrorException("Resources service is incapable of admin operation"); + } } diff --git a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceCRUDController.java b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceCRUDController.java index 0e3b68484..973874551 100644 --- a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceCRUDController.java +++ b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourceCRUDController.java @@ -14,58 +14,46 @@ @Tag(name = "Resources CRUD support") public class ResourceCRUDController { - 
@Autowired - private ResourcesServer resourcesServer; - - @Autowired - private ServiceAuthorizationManager authenticationManager; - - /** - * GET endpoint that returns uncontextualized resource data to the authorized user. If the resource is - * public, the endpoint works w/o authentication. - * - * @param urn - * @param principal - */ - public void getResource(String urn, Principal principal) { - } - - /** - * POST endpoint that takes geometry and (possibly) value URLs for dependencies from the - * {@link org.integratedmodelling.klab.api.digitaltwin.DigitalTwin} and returns the contextualized data as - * a flow. - */ - @Secured(Role.USER) - public void contextualizeResource(Principal principal) { - - } - - /** - * PATCH endpoint that modifies a resource with new content, tracking all operations - */ - @Secured(Role.USER) - public void updateResource(Principal principal) { - - } - - /** - * PUT endpoint that creates a new resource, returning the new URN (takes hints for it but may not follow - * them). Uses multipart data. Note that permissions are handled separately; at creation, the resource is - * only available to the creating identity - */ - @Secured(Role.USER) - public String createResource(Principal principal) { - return null; - } - - /** - * DELETE endpoint that deletes a resource. - * - * @param urn - */ - @Secured(Role.USER) - public void deleteResource(String urn, Principal principal) { - - } - + @Autowired private ResourcesServer resourcesServer; + + @Autowired private ServiceAuthorizationManager authenticationManager; + + /** + * GET endpoint that returns uncontextualized resource data to the authorized user. If the + * resource is public, the endpoint works w/o authentication. + * + * @param urn + * @param principal + */ + public void getResource(String urn, Principal principal) {} + + /** + * POST endpoint that takes geometry and (possibly) value URLs for dependencies from the {@link + * org.integratedmodelling.klab.api.digitaltwin.DigitalTwin} and returns the contextualized data + * as a flow. + */ + @Secured(Role.USER) + public void contextualizeResource(Principal principal) {} + + /** PATCH endpoint that modifies a resource with new content, tracking all operations */ + @Secured(Role.USER) + public void updateResource(Principal principal) {} + + /** + * PUT endpoint that creates a new resource, returning the new URN (takes hints for it but may not + * follow them). Uses multipart data. Note that permissions are handled separately; at creation, + * the resource is only available to the creating identity + */ + @Secured(Role.USER) + public String createResource(Principal principal) { + return null; + } + + /** + * DELETE endpoint that deletes a resource. 
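+   *
+   * <p>None of these handlers is mapped yet; purely as an illustration (hypothetical mapping,
+   * reusing an existing constant), a wired-up version might look like:
+   *
+   * <pre>{@code
+   * @DeleteMapping(ServicesAPI.RESOURCES.RESOURCE)
+   * public void deleteResource(@PathVariable("urn") String urn, Principal principal) {
+   *   // resolve the caller's scope and delegate to the service, as the other controllers do
+   * }
+   * }</pre>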
+ * + * @param urn + */ + @Secured(Role.USER) + public void deleteResource(String urn, Principal principal) {} } diff --git a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourcesProviderController.java b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourcesProviderController.java index 2ab8e772a..c84ab6928 100644 --- a/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourcesProviderController.java +++ b/klab.services.resources.server/src/main/java/org/integratedmodelling/resources/server/controllers/ResourcesProviderController.java @@ -50,250 +50,354 @@ @Tag(name = "Resources service core API") public class ResourcesProviderController { - @Autowired - private ResourcesServer resourcesServer; - - @Autowired - private ServiceAuthorizationManager authenticationManager; - - /** - * Retrieve all the knowledge included in one or more projects. The return set contains all needed - * documnents with their versions, in order of dependency. - * - * @param projects - * @param principal - * @return the resources to load to ingest the knowledge included in the requested projects - */ - @GetMapping(ServicesAPI.RESOURCES.PROJECTS) - public @ResponseBody List getProjects(@RequestParam Collection projects, - Principal principal) { - return resourcesServer.klabService().projects(projects, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.PROJECT) - public @ResponseBody Project getProject(@PathVariable("projectName") String projectName, - Principal principal) { - return resourcesServer.klabService().resolveProject(projectName, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.MODEL) - public @ResponseBody ResourceSet getModel(@PathVariable("modelName") String modelName, - Principal principal) { - return resourcesServer.klabService().model(modelName, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_URN) - public @ResponseBody ResourceSet resolve(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resolve(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_NAMESPACE_URN) - public @ResponseBody KimNamespace resolveNamespace(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resolveNamespace(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_ONTOLOGY_URN) - public @ResponseBody KimOntology resolveOntology(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resolveOntology(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_OBSERVATION_STRATEGY_DOCUMENT_URN) - public @ResponseBody KimObservationStrategyDocument resolveObservationStrategyDocument(@PathVariable( - "urn") String urn, Principal principal) { - return resourcesServer.klabService().resolveObservationStrategyDocument(urn, - principal instanceof EngineAuthorization authorization ? 
authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.LIST_WORKSPACES) - public @ResponseBody Collection listWorkspaces() { - return resourcesServer.klabService().listWorkspaces(); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_BEHAVIOR_URN) - public @ResponseBody KActorsBehavior resolveBehavior(@PathVariable("urn") String urn, - Principal principal) { - return resourcesServer.klabService().resolveBehavior(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - /** - * TODO this should be just RESOURCE and take all methods for the various CRUD ops: - * GET for data relative to the resource including status and main content; - * POST for contextualization with a ResolutionRequest; - * PUT to create new; - * PATCH to update; - * DELETE to delete. - * - * @param urn - * @param principal - * @return - */ - @GetMapping(ServicesAPI.RESOURCES.RESOURCE) - public @ResponseBody Resource resolveResource(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resolveResource(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_WORKSPACE_URN) - public @ResponseBody Workspace resolveWorkspace(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resolveWorkspace(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_SERVICE_CALL) - public @ResponseBody ResourceSet resolveServiceCall(@PathVariable("name") String name, - @PathVariable(value = "version", required = false) String version, - Principal principal) { - Version v = version == null ? null : Version.create(version); - return resourcesServer.klabService().resolveServiceCall(name, v, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOURCE_STATUS) - public @ResponseBody ResourceStatus resourceStatus(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().resourceStatus(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_OBSERVABLE) - public @ResponseBody KimObservable resolveObservable(@RequestParam("definition") String definition) { - return resourcesServer.klabService().resolveObservable(definition); - } - - @GetMapping(ServicesAPI.RESOURCES.DESCRIBE_CONCEPT) - public @ResponseBody KimConcept.Descriptor describeConcept(@PathVariable("conceptUrn") String conceptUrn) { - return resourcesServer.klabService().describeConcept(conceptUrn); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_CONCEPT) - public @ResponseBody KimConcept resolveConcept(@PathVariable("definition") String definition) { - return resourcesServer.klabService().resolveConcept(definition); - } - - /** - * This one creates the DataRequest from the binary input stream coming from the client. The request may - * include input data in an {@link org.integratedmodelling.klab.common.data.Instance} field. 
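     * <p>A hypothetical client-side counterpart (the builder setters are assumed from the getters
     * used in the handler; only the Avro calls are standard API): it serializes a DataRequest with
     * Avro's binary encoding and posts the bytes as application/octet-stream.
     * <pre>{@code
     * var out = new ByteArrayOutputStream();
     * var encoder = EncoderFactory.get().binaryEncoder(out, null);
     * var writer = new SpecificDatumWriter<>(DataRequest.class);
     * var request = DataRequest.newBuilder()       // assumed Avro-generated builder
     *     .setResourceUrn(resourceUrn)             // assumed field setters
     *     .setGeometry(geometry.encode())
     *     .build();
     * writer.write(request, encoder);
     * encoder.flush();
     * byte[] payload = out.toByteArray();          // POST to ServicesAPI.RESOURCES.CONTEXTUALIZE
     * }</pre>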
- * - * @param requestStream - * @param response - * @param principal - */ - @PostMapping(value = ServicesAPI.RESOURCES.CONTEXTUALIZE, consumes = - MediaType.APPLICATION_OCTET_STREAM_VALUE) - public void contextualize(InputStream requestStream, HttpServletResponse response, Principal principal) { - - if (principal instanceof EngineAuthorization authorization) { - - try { - var decoder = DecoderFactory.get().binaryDecoder(requestStream, null); - var reader = new SpecificDatumReader<>(DataRequest.class); - var request = reader.read(null, decoder); - var scope = authorization.getScope(); - var resource = - resourcesServer.klabService().resolveResource(request.getResourceUrn().toString(), - scope); - - Data input = null; - if (request.getInputData() != null) { - input = new DataImpl(request.getInputData()); - } - - var data = resourcesServer.klabService().contextualize(resource, - GeometryRepository.INSTANCE.get(request.getGeometry().toString(), Geometry.class), - input, scope); - - if (data instanceof DataImpl dataImpl) { - try { - var output = response.getOutputStream(); - dataImpl.copyTo(output); - output.flush(); - return; - } catch (Throwable t) { - throw new KlabResourceAccessException(t); - } - } - } catch (IOException e) { - throw new KlabIOException(e); - } - + @Autowired private ResourcesServer resourcesServer; + + @Autowired private ServiceAuthorizationManager authenticationManager; + + /** + * Retrieve all the knowledge included in one or more projects. The return set contains all needed + * documnents with their versions, in order of dependency. + * + * @param projects + * @param principal + * @return the resources to load to ingest the knowledge included in the requested projects + */ + @GetMapping(ServicesAPI.RESOURCES.PROJECTS) + public @ResponseBody List getProjects( + @RequestParam Collection projects, Principal principal) { + return resourcesServer + .klabService() + .projects( + projects, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.PROJECT) + public @ResponseBody Project getProject( + @PathVariable("projectName") String projectName, Principal principal) { + return resourcesServer + .klabService() + .resolveProject( + projectName, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.MODEL) + public @ResponseBody ResourceSet getModel( + @PathVariable("modelName") String modelName, Principal principal) { + return resourcesServer + .klabService() + .model( + modelName, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_URN) + public @ResponseBody ResourceSet resolve(@PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolve( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_NAMESPACE_URN) + public @ResponseBody KimNamespace resolveNamespace( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveNamespace( + urn, + principal instanceof EngineAuthorization authorization + ? 
authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_ONTOLOGY_URN) + public @ResponseBody KimOntology resolveOntology( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveOntology( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_OBSERVATION_STRATEGY_DOCUMENT_URN) + public @ResponseBody KimObservationStrategyDocument resolveObservationStrategyDocument( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveObservationStrategyDocument( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.LIST_WORKSPACES) + public @ResponseBody Collection listWorkspaces() { + return resourcesServer.klabService().listWorkspaces(); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_BEHAVIOR_URN) + public @ResponseBody KActorsBehavior resolveBehavior( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveBehavior( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + /** + * TODO this should be just RESOURCE and take all methods for the various CRUD ops: GET for data + * relative to the resource including status and main content; POST for contextualization with a + * ResolutionRequest; PUT to create new; PATCH to update; DELETE to delete. + * + * @param urn + * @param principal + * @return + */ + @GetMapping(ServicesAPI.RESOURCES.RESOURCE) + public @ResponseBody Resource resolveResource( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveResource( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_WORKSPACE_URN) + public @ResponseBody Workspace resolveWorkspace( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveWorkspace( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_SERVICE_CALL) + public @ResponseBody ResourceSet resolveServiceCall( + @PathVariable("name") String name, + @PathVariable(value = "version", required = false) String version, + Principal principal) { + Version v = version == null ? null : Version.create(version); + return resourcesServer + .klabService() + .resolveServiceCall( + name, + v, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOURCE_STATUS) + public @ResponseBody ResourceStatus resourceStatus( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resourceStatus( + urn, + principal instanceof EngineAuthorization authorization + ? 
authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_OBSERVABLE) + public @ResponseBody KimObservable resolveObservable( + @RequestParam("definition") String definition) { + return resourcesServer.klabService().resolveObservable(definition); + } + + @GetMapping(ServicesAPI.RESOURCES.DESCRIBE_CONCEPT) + public @ResponseBody KimConcept.Descriptor describeConcept( + @PathVariable("conceptUrn") String conceptUrn) { + return resourcesServer.klabService().describeConcept(conceptUrn); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_CONCEPT) + public @ResponseBody KimConcept resolveConcept(@PathVariable("definition") String definition) { + return resourcesServer.klabService().resolveConcept(definition); + } + + /** + * This one creates the DataRequest from the binary input stream coming from the client. The + * request may include input data in an {@link org.integratedmodelling.klab.common.data.Instance} + * field. + * + * @param requestStream + * @param response + * @param principal + */ + @PostMapping( + value = ServicesAPI.RESOURCES.CONTEXTUALIZE, + consumes = MediaType.APPLICATION_OCTET_STREAM_VALUE) + public void contextualize( + InputStream requestStream, HttpServletResponse response, Principal principal) { + + if (principal instanceof EngineAuthorization authorization) { + + try { + var decoder = DecoderFactory.get().binaryDecoder(requestStream, null); + var reader = new SpecificDatumReader<>(DataRequest.class); + var request = reader.read(null, decoder); + var scope = authorization.getScope(); + var resource = + resourcesServer + .klabService() + .resolveResource(request.getResourceUrn().toString(), scope); + + Data input = null; + if (request.getInputData() != null) { + input = new DataImpl(request.getInputData()); } - throw new KlabIllegalStateException("Resource contextualizer: found unexpected implementations"); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_DATAFLOW_URN) - public @ResponseBody KimObservationStrategyDocument resolveDataflow(@PathVariable("urn") String urn, - Principal principal) { - return resourcesServer.klabService().resolveDataflow(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @GetMapping(ServicesAPI.RESOURCES.GET_WORLDVIEW) - public @ResponseBody Worldview getWorldview() { - return resourcesServer.klabService().getWorldview(); - } - - @GetMapping(ServicesAPI.RESOURCES.DEPENDENTS) - public @ResponseBody List dependents(@PathVariable("namespaceId") String namespaceId) { - return resourcesServer.klabService().dependents(namespaceId); - } - - @GetMapping(ServicesAPI.RESOURCES.PRECURSORS) - public List precursors(@PathVariable("namespaceId") String namespaceId) { - return resourcesServer.klabService().precursors(namespaceId); - } - - @GetMapping(ServicesAPI.RESOURCES.QUERY_RESOURCES) - public List queryResources(@RequestParam("urnPattern") String urnPattern, - @RequestParam("resourceTypes") KlabAsset.KnowledgeClass... resourceTypes) { - return resourcesServer.klabService().queryResources(urnPattern, resourceTypes); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOLVE_PROJECT) - public @ResponseBody Project resolveProject(@PathVariable("projectName") String projectName, - Principal principal) { - return resourcesServer.klabService().resolveProject(projectName, - principal instanceof EngineAuthorization authorization ? 
authorization.getScope() : null); - } - - // FIXME use POST and a ResolutionRequest - @PostMapping(ServicesAPI.RESOURCES.QUERY_MODELS) - public @ResponseBody ResourceSet queryModels(@RequestBody ResolutionRequest request, - Principal principal) { - return resourcesServer.klabService().queryModels(request.getObservable(), - principal instanceof EngineAuthorization authorization ? - authorization.getScope(ContextScope.class) - .withResolutionConstraints(request.getResolutionConstraints().toArray(new ResolutionConstraint[0])) : null); - } - - @GetMapping(ServicesAPI.RESOURCES.MODEL_GEOMETRY) - public @ResponseBody Coverage modelGeometry(@PathVariable("modelUrn") String modelUrn) { - return resourcesServer.klabService().modelGeometry(modelUrn); - } - - @GetMapping(ServicesAPI.RESOURCES.READ_BEHAVIOR) - public @ResponseBody KActorsBehavior readBehavior(@RequestParam("url") URL url) { - return resourcesServer.klabService().readBehavior(url); - } - - @GetMapping(ServicesAPI.RESOURCES.RESOURCE_RIGHTS) - public ResourcePrivileges getResourceRights(@PathVariable("urn") String urn, Principal principal) { - return resourcesServer.klabService().getRights(urn, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); - } - - @PutMapping(ServicesAPI.RESOURCES.RESOURCE_RIGHTS) - public boolean setResourceRights(@PathVariable("urn") String urn, - @RequestBody ResourcePrivileges resourcePrivileges, - Principal principal) { - return resourcesServer.klabService().setRights(urn, resourcePrivileges, - principal instanceof EngineAuthorization authorization ? authorization.getScope() : null); + var data = + resourcesServer + .klabService() + .contextualize( + resource, + GeometryRepository.INSTANCE.get( + request.getGeometry().toString(), Geometry.class), + input, + scope); + + if (data instanceof DataImpl dataImpl) { + try { + var output = response.getOutputStream(); + dataImpl.copyTo(output); + output.flush(); + return; + } catch (Throwable t) { + throw new KlabResourceAccessException(t); + } + } + } catch (IOException e) { + throw new KlabIOException(e); + } } -} \ No newline at end of file + throw new KlabIllegalStateException( + "Resource contextualizer: found unexpected implementations"); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_DATAFLOW_URN) + public @ResponseBody KimObservationStrategyDocument resolveDataflow( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .resolveDataflow( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.GET_WORLDVIEW) + public @ResponseBody Worldview getWorldview() { + return resourcesServer.klabService().getWorldview(); + } + + @GetMapping(ServicesAPI.RESOURCES.DEPENDENTS) + public @ResponseBody List dependents( + @PathVariable("namespaceId") String namespaceId) { + return resourcesServer.klabService().dependents(namespaceId); + } + + @GetMapping(ServicesAPI.RESOURCES.PRECURSORS) + public List precursors(@PathVariable("namespaceId") String namespaceId) { + return resourcesServer.klabService().precursors(namespaceId); + } + + @GetMapping(ServicesAPI.RESOURCES.QUERY_RESOURCES) + public List queryResources( + @RequestParam("urnPattern") String urnPattern, + @RequestParam("resourceTypes") KlabAsset.KnowledgeClass... 
resourceTypes) { + return resourcesServer.klabService().queryResources(urnPattern, resourceTypes); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOLVE_PROJECT) + public @ResponseBody Project resolveProject( + @PathVariable("projectName") String projectName, Principal principal) { + return resourcesServer + .klabService() + .resolveProject( + projectName, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + // FIXME use POST and a ResolutionRequest + @PostMapping(ServicesAPI.RESOURCES.QUERY_MODELS) + public @ResponseBody ResourceSet queryModels( + @RequestBody ResolutionRequest request, Principal principal) { + return resourcesServer + .klabService() + .queryModels( + request.getObservable(), + principal instanceof EngineAuthorization authorization + ? authorization + .getScope(ContextScope.class) + .withResolutionConstraints( + request.getResolutionConstraints().toArray(new ResolutionConstraint[0])) + : null); + } + + @GetMapping(ServicesAPI.RESOURCES.MODEL_GEOMETRY) + public @ResponseBody Coverage modelGeometry(@PathVariable("modelUrn") String modelUrn) { + return resourcesServer.klabService().modelGeometry(modelUrn); + } + + @GetMapping(ServicesAPI.RESOURCES.READ_BEHAVIOR) + public @ResponseBody KActorsBehavior readBehavior(@RequestParam("url") URL url) { + return resourcesServer.klabService().readBehavior(url); + } + + @GetMapping(ServicesAPI.RESOURCES.RESOURCE_RIGHTS) + public ResourcePrivileges getResourceRights( + @PathVariable("urn") String urn, Principal principal) { + return resourcesServer + .klabService() + .getRights( + urn, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } + + @PutMapping(ServicesAPI.RESOURCES.RESOURCE_RIGHTS) + public boolean setResourceRights( + @PathVariable("urn") String urn, + @RequestBody ResourcePrivileges resourcePrivileges, + Principal principal) { + return resourcesServer + .klabService() + .setRights( + urn, + resourcePrivileges, + principal instanceof EngineAuthorization authorization + ? authorization.getScope() + : null); + } +} diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/ResourcesProvider.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/ResourcesProvider.java index 4804fc134..e6e2c8791 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/ResourcesProvider.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/ResourcesProvider.java @@ -68,1104 +68,1175 @@ import java.util.stream.Collectors; @Service -public class ResourcesProvider extends BaseService implements ResourcesService, ResourcesService.Admin { +public class ResourcesProvider extends BaseService + implements ResourcesService, ResourcesService.Admin { + + private final String hardwareSignature = Utils.Names.getHardwareId(); + private final WorkspaceManager workspaceManager; + private final ResourcesKBox resourcesKbox; + + /** + * We keep a hash of all the resource URNs we serve for quick reference and search + * + * @deprecated use {@link + * org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} + */ + private Set localResources = new HashSet<>(); + + /** + * the only persistent info in this implementation is the catalog of resource status info. This is + * used for individual resources and whole projects. 
It also holds and maintains the review + * status, which in the case of projects propagates to the namespaces and models. Reviews and the + * rest of the editorial material should be part of the provenance info associated to the items. + * The review process is organized and maintained in the community service; only its initiation + * and the storage of the review status is the job of the resources service. + * + * @deprecated use {@link + * org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} + */ + private DB db = null; + + private ConcurrentNavigableMap catalog = null; + + /** + * @deprecated use {@link + * org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} + */ + private ModelKbox kbox; + + // set to true when the connected reasoner becomes operational + private boolean semanticSearchAvailable = false; + /* + * "fair" read/write lock to ensure no reading during updates + */ + private final ReadWriteLock updateLock = new ReentrantReadWriteLock(true); + + @SuppressWarnings("unchecked") + public ResourcesProvider(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { + + super(scope, Type.RESOURCES, options); + + ServiceConfiguration.INSTANCE.setMainService(this); - - private final String hardwareSignature = Utils.Names.getHardwareId(); - private final WorkspaceManager workspaceManager; - private final ResourcesKBox resourcesKbox; - - /** - * We keep a hash of all the resource URNs we serve for quick reference and search - * - * @deprecated use {@link org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} + /* + Find out any Instance-annotated classes before we read anything */ - private Set localResources = new HashSet<>(); + scanPackages( + (annotation, annotated) -> { + if (!LanguageAdapter.INSTANCE.registerInstanceClass(annotation, annotated)) { + Logging.INSTANCE.error( + "Configuration error: multiple definitions, cannot redefine instance" + + " " + + "implementation " + + annotation.value()); + serviceNotifications() + .add( + Notification.create( + "Configuration error: multiple definitions, " + + "cannot redefine instance" + + " " + + "implementation " + + annotation.value(), + Notification.Level.Error)); + } + }, + Instance.class); + + this.kbox = ModelKbox.create(this); + this.workspaceManager = + new WorkspaceManager(scope, getStartupOptions(), this, this::resolveRemoteProject); + + this.resourcesKbox = new ResourcesKBox(scope, options, this); + + // FIXME remove along with MapDB and catalog + this.db = + DBMaker.fileDB( + getConfigurationSubdirectory(options, "catalog") + File.separator + "resources.db") + .transactionEnable() + .closeOnJvmShutdown() + .make(); + this.catalog = + db.treeMap("resourcesCatalog", GroupSerializer.STRING, GroupSerializer.JAVA).createOrOpen(); - /** - * the only persistent info in this implementation is the catalog of resource status info. This is used - * for individual resources and whole projects. It also holds and maintains the review status, which in - * the case of projects propagates to the namespaces and models. Reviews and the rest of the editorial - * material should be part of the provenance info associated to the items. The review process is organized - * and maintained in the community service; only its initiation and the storage of the review status is - * the job of the resources service. 
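// Editor's note — minimal sketch, not part of this patch. The deprecated catalog described here is
// a transactional MapDB map from resource URN to ResourceStatus, built exactly as in the
// ResourcesProvider constructor shown in this diff; new code should target ResourcesKBox instead.
// The directory and URN below are placeholders.
import java.io.File;
import org.mapdb.DBMaker;
import org.mapdb.serializer.GroupSerializer;

class ResourceStatusCatalogSketch {

  static void example(File catalogDir) {
    // file-backed, transactional B-tree map, closed automatically on JVM shutdown
    var db =
        DBMaker.fileDB(catalogDir + File.separator + "resources.db")
            .transactionEnable()
            .closeOnJvmShutdown()
            .make();
    var catalog =
        db.treeMap("resourcesCatalog", GroupSerializer.STRING, GroupSerializer.JAVA)
            .createOrOpen();

    // values are Java-serialized ResourceStatus beans keyed by URN; get() returns null for
    // resources that were never registered
    var status = catalog.get("hypothetical.resource.urn");
    if (status != null) {
      // registerResource()/importProject() follow this put-then-commit pattern to persist updates
      catalog.put("hypothetical.resource.urn", status);
      db.commit();
    }
    db.close();
  }
}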
- * - * @deprecated use {@link org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} - */ - private DB db = null; - private ConcurrentNavigableMap catalog = null; - /** - * @deprecated use {@link org.integratedmodelling.klab.services.resources.persistence.ResourcesKBox} + /* + initialize the plugin system to handle components */ - private ModelKbox kbox; - // set to true when the connected reasoner becomes operational - private boolean semanticSearchAvailable = false; + getComponentRegistry() + .initializeComponents( + this.workspaceManager.getConfiguration(), + getConfigurationSubdirectory(options, "components")); + + // load predefined runtime libraries + getComponentRegistry() + .loadExtensions( + "org.integratedmodelling.klab.runtime.libraries", + "org.integratedmodelling.klab.services.resources.library"); + } + + public Project resolveRemoteProject(String projectId) { + // TODO + System.out.println("TODO resolve external project " + projectId); + return null; + } + + @Override + public void initializeService() { + + Logging.INSTANCE.setSystemIdentifier("Resources service: "); + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceInitializing, + capabilities(serviceScope()).toString()); + + // this.workspaceManager.loadWorkspace(); /* - * "fair" read/write lock to ensure no reading during updates + * TODO launch update service */ - private final ReadWriteLock updateLock = new ReentrantReadWriteLock(true); - - @SuppressWarnings("unchecked") - public ResourcesProvider(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { - - super(scope, Type.RESOURCES, options); - - ServiceConfiguration.INSTANCE.setMainService(this); - - /* - Find out any Instance-annotated classes before we read anything - */ - scanPackages((annotation, annotated) -> { - if (!LanguageAdapter.INSTANCE.registerInstanceClass(annotation, annotated)) { - Logging.INSTANCE.error( - "Configuration error: multiple definitions, cannot redefine instance" + " " + - "implementation " + annotation.value()); - serviceNotifications().add(Notification.create( - "Configuration error: multiple definitions, " + "cannot redefine instance" + " " + - "implementation " + annotation.value(), - Notification.Level.Error)); - } - }, Instance.class); - - - this.kbox = ModelKbox.create(this); - this.workspaceManager = new WorkspaceManager(scope, getStartupOptions(), this, - this::resolveRemoteProject); - - this.resourcesKbox = new ResourcesKBox(scope, options, this); - - // FIXME remove along with MapDB and catalog - this.db = DBMaker.fileDB(getConfigurationSubdirectory(options, - "catalog") + File.separator + "resources.db").transactionEnable().closeOnJvmShutdown().make(); - this.catalog = db.treeMap("resourcesCatalog", GroupSerializer.STRING, - GroupSerializer.JAVA).createOrOpen(); - - /* - initialize the plugin system to handle components - */ - getComponentRegistry().initializeComponents(this.workspaceManager.getConfiguration(), - getConfigurationSubdirectory(options, "components")); - - // load predefined runtime libraries - getComponentRegistry().loadExtensions("org.integratedmodelling.klab.runtime.libraries", - "org.integratedmodelling.klab.services.resources.library"); - - } - - public Project resolveRemoteProject(String projectId) { - // TODO - System.out.println("TODO resolve external project " + projectId); - return null; - } - - @Override - public void initializeService() { - - Logging.INSTANCE.setSystemIdentifier("Resources service: "); - - 
serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceInitializing, - capabilities(serviceScope()).toString()); - - - // this.workspaceManager.loadWorkspace(); - /* - * TODO launch update service - */ - - /** - * Setup an embedded broker, possibly to be shared with other services, if we're local and there - * is no configured broker. - */ - if (Utils.URLs.isLocalHost( - this.getUrl()) && workspaceManager.getConfiguration().getBrokerURI() == null) { - this.embeddedBroker = new EmbeddedBroker(); - } - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceAvailable, - capabilities(serviceScope())); - } - - @Override - public boolean operationalizeService() { - var reasoner = serviceScope().getService(Reasoner.class); - if (reasoner.status().isOperational()) { - Logging.INSTANCE.info("Reasoner is available: indexing semantic assets"); - indexKnowledge(); - this.semanticSearchAvailable = true; - } else { - Logging.INSTANCE.warn("reasoner is inoperative: cannot index semantic content"); - this.semanticSearchAvailable = false; - } - return true; - } /** - * Return whatever worldview is defined in this service, using any other services necessary, or an empty - * set if none is available. - *

- * TODO we may support >1 worldviews at this level and pass the worldview name. - * - * @return + * Setup an embedded broker, possibly to be shared with other services, if we're local and there + * is no configured broker. */ - public Worldview getWorldview() { - return this.workspaceManager.getWorldview(); + if (Utils.URLs.isLocalHost(this.getUrl()) + && workspaceManager.getConfiguration().getBrokerURI() == null) { + this.embeddedBroker = new EmbeddedBroker(); } - /** - * Called after startup and by the update timer at regular intervals. TODO must check if changes were made - * and reload the affected workspaces if so. - *

- * Projects with update frequency == 0 do not get updated. + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceAvailable, + capabilities(serviceScope())); + } + + @Override + public boolean operationalizeService() { + var reasoner = serviceScope().getService(Reasoner.class); + if (reasoner.status().isOperational()) { + Logging.INSTANCE.info("Reasoner is available: indexing semantic assets"); + indexKnowledge(); + this.semanticSearchAvailable = true; + } else { + Logging.INSTANCE.warn("reasoner is inoperative: cannot index semantic content"); + this.semanticSearchAvailable = false; + } + return true; + } + + /** + * Return whatever worldview is defined in this service, using any other services necessary, or an + * empty set if none is available. + * + *

TODO we may support >1 worldviews at this level and pass the worldview name. + * + * @return + */ + public Worldview getWorldview() { + return this.workspaceManager.getWorldview(); + } + + /** + * Called after startup and by the update timer at regular intervals. TODO must check if changes + * were made and reload the affected workspaces if so. + * + *

Projects with update frequency == 0 do not get updated. + */ + private void loadResources(File resourceDir, ProjectImpl project, int level, boolean legacy) { + + /* + * load new and legacy resources. This thing returns null if the dir does not + * exist. */ - private void loadResources(File resourceDir, ProjectImpl project, int level, boolean legacy) { - - /* - * load new and legacy resources. This thing returns null if the dir does not - * exist. - */ - File[] files = resourceDir.listFiles(new FileFilter() { - @Override - public boolean accept(File pathname) { + File[] files = + resourceDir.listFiles( + new FileFilter() { + @Override + public boolean accept(File pathname) { return pathname.isDirectory() && pathname.canRead(); - } - }); - if (files != null) { - for (File subdir : files) { - Resource resource = null; - if ("unreviewed".equals(Utils.Files.getFileBaseName(subdir))) { - loadResources(subdir, project, 0, false); - } else if ("staging".equals(Utils.Files.getFileBaseName(subdir))) { - loadResources(subdir, project, 1, false); - } else { - // CHUPA CHUPA - // resource = KimAdapter.adaptResource(Utils.Json - // .load(new File(subdir + File.separator + "resource - // .json"), - // ResourceReference.class)); - } - if (resource != null) { - localResources.add(resource.getUrn()); - ResourceStatus status = catalog.get(resource.getUrn()); - if (status == null) { - status = new ResourceStatus(); - status.setReviewStatus(level); - status.setFileLocation(subdir); - status.setType(Utils.Notifications.hasErrors( - resource.getNotifications()) ? ResourceStatus.Type.OFFLINE : - ResourceStatus.Type.AVAILABLE); - status.setLegacy(legacy); - status.setKnowledgeClass(KnowledgeClass.RESOURCE); - // TODO fill in the rest - catalog.put(resource.getUrn(), status); - } - } - } + } + }); + if (files != null) { + for (File subdir : files) { + Resource resource = null; + if ("unreviewed".equals(Utils.Files.getFileBaseName(subdir))) { + loadResources(subdir, project, 0, false); + } else if ("staging".equals(Utils.Files.getFileBaseName(subdir))) { + loadResources(subdir, project, 1, false); + } else { + // CHUPA CHUPA + // resource = KimAdapter.adaptResource(Utils.Json + // .load(new File(subdir + File.separator + "resource + // .json"), + // ResourceReference.class)); } - } - - private void indexKnowledge() { - - // TODO index ontologies - - for (var namespace : workspaceManager.getNamespaces()) { - kbox.remove(namespace.getUrn(), scope); - for (var statement : namespace.getStatements()) { - if (statement instanceof KimModel model) { - kbox.store(model, scope); - } - } + if (resource != null) { + localResources.add(resource.getUrn()); + ResourceStatus status = catalog.get(resource.getUrn()); + if (status == null) { + status = new ResourceStatus(); + status.setReviewStatus(level); + status.setFileLocation(subdir); + status.setType( + Utils.Notifications.hasErrors(resource.getNotifications()) + ? 
ResourceStatus.Type.OFFLINE + : ResourceStatus.Type.AVAILABLE); + status.setLegacy(legacy); + status.setKnowledgeClass(KnowledgeClass.RESOURCE); + // TODO fill in the rest + catalog.put(resource.getUrn(), status); + } } - - } - - @Override - public KimNamespace resolveNamespace(String urn, Scope scope) { - return this.workspaceManager.getNamespace(urn); - // TODO check scope for authorization + } } + } - @Override - public KimOntology resolveOntology(String urn, Scope scope) { - return this.workspaceManager.getOntology(urn); - // TODO check scope for authorization - } + private void indexKnowledge() { - @Override - public KActorsBehavior resolveBehavior(String urn, Scope scope) { - return this.workspaceManager.getBehavior(urn); - // TODO check scope for authorization - } + // TODO index ontologies - public KimObservationStrategyDocument resolveObservationStrategyDocument(String urn, Scope scope) { - return this.workspaceManager.getStrategyDocument(urn); - // TODO check scope for authorization - } - - @Override - public Resource resolveResource(String urn, Scope scope) { - if (localResources.contains(Urn.removeParameters(urn))) { - // TODO + for (var namespace : workspaceManager.getNamespaces()) { + kbox.remove(namespace.getUrn(), scope); + for (var statement : namespace.getStatements()) { + if (statement instanceof KimModel model) { + kbox.store(model, scope); } - return null; + } } - - @Override - public Workspace resolveWorkspace(String urn, Scope scope) { - // TODO check permissions in scope, possibly filter the workspace's projects - return this.workspaceManager.getWorkspace(urn); + } + + @Override + public KimNamespace resolveNamespace(String urn, Scope scope) { + return this.workspaceManager.getNamespace(urn); + // TODO check scope for authorization + } + + @Override + public KimOntology resolveOntology(String urn, Scope scope) { + return this.workspaceManager.getOntology(urn); + // TODO check scope for authorization + } + + @Override + public KActorsBehavior resolveBehavior(String urn, Scope scope) { + return this.workspaceManager.getBehavior(urn); + // TODO check scope for authorization + } + + public KimObservationStrategyDocument resolveObservationStrategyDocument( + String urn, Scope scope) { + return this.workspaceManager.getStrategyDocument(urn); + // TODO check scope for authorization + } + + @Override + public Resource resolveResource(String urn, Scope scope) { + if (localResources.contains(Urn.removeParameters(urn))) { + // TODO } - - @Override - public ResourceSet resolveServiceCall(String name, Version version, Scope scope) { - - ResourceSet ret = new ResourceSet(); - boolean empty = true; - for (var component : getComponentRegistry().resolveServiceCall(name, version)) { - if (component.permissions().checkAuthorization(scope)) { - empty = false; - ret.getResults().add( - new ResourceSet.Resource(this.serviceId(), component.id(), null, component.version(), - KnowledgeClass.COMPONENT)); - } - } - - if (!empty) { - ret.getServices().put(this.serviceId(), this.getUrl()); - } - - ret.setEmpty(empty); - - return ret; + return null; + } + + @Override + public Workspace resolveWorkspace(String urn, Scope scope) { + // TODO check permissions in scope, possibly filter the workspace's projects + return this.workspaceManager.getWorkspace(urn); + } + + @Override + public ResourceSet resolveServiceCall(String name, Version version, Scope scope) { + + ResourceSet ret = new ResourceSet(); + boolean empty = true; + for (var component : getComponentRegistry().resolveServiceCall(name, 
version)) { + if (component.permissions().checkAuthorization(scope)) { + empty = false; + ret.getResults() + .add( + new ResourceSet.Resource( + this.serviceId(), + component.id(), + null, + component.version(), + KnowledgeClass.COMPONENT)); + } } - @Override - public Data contextualize(Resource resource, Geometry geometry, @Nullable Data input, Scope scope) { - var adapter = getComponentRegistry().getAdapter(resource.getAdapterType(), scope); - if (adapter == null) { - return Data.empty("Adapter " + resource.getAdapterType() + " not available"); - } - return adapter.encode(resource, geometry); + if (!empty) { + ret.getServices().put(this.serviceId(), this.getUrl()); } - @Override - public KimObservationStrategyDocument resolveDataflow(String urn, Scope scope) { - // TODO Auto-generated method stub - return null; - } + ret.setEmpty(empty); - @Override - public List dependents(String namespaceId) { - return null; - } + return ret; + } - @Override - public List precursors(String namespaceId) { - return null; + @Override + public Data contextualize( + Resource resource, Geometry geometry, @Nullable Data input, Scope scope) { + var adapter = getComponentRegistry().getAdapter(resource.getAdapterType(), scope); + if (adapter == null) { + return Data.empty("Adapter " + resource.getAdapterType() + " not available"); } - - - /** - * TODO improve logics: the main function should return the appropriate ProjectStorage for the URL in - * all cases. Then call importProject (storage) when all the different storages are implemented. - *

- * TODO add scope so we can record the owner/importer in the project rights - * - * @param workspaceName - * @param projectUrl can be a file (zip, jar, existing folder, or anything supported by - * extensions), a git URL (with a potential branch name after a # sign) or a - * http URL from another resource manager. Could also be a service URL for - * mirroring. - * @param overwriteIfExisting self-explanatory. If the project is remote, reload if true. - * @return - * @deprecated use project import schema + register resource - */ - // @Override - public synchronized List importProject(String workspaceName, String projectUrl, - boolean overwriteIfExisting, UserScope scope) { - - var storage = workspaceManager.importProject(projectUrl, workspaceName); - if (storage == null) { - return List.of(Utils.Resources.createEmpty( - Notification.create("Import failed for " + projectUrl, Notification.Level.Error))); - } - - var project = workspaceManager.loadProject(storage, workspaceName); - - // initial resource permissions - var status = new ResourceStatus(); - if (scope.getIdentity() instanceof UserIdentity user) { - status.getPrivileges().getAllowedUsers().add(user.getUsername()); - status.setOwner(user.getUsername()); - } - status.setFileLocation(storage instanceof FileProjectStorage fps ? fps.getRootFolder() : null); - status.setKnowledgeClass(KnowledgeClass.PROJECT); - status.setReviewStatus(0); - status.setType(ResourceStatus.Type.AVAILABLE); - status.setLegacy(false); - catalog.put(project.getUrn(), status); - db.commit(); - - return collectProject(project.getUrn(), CRUDOperation.CREATE, workspaceName, scope); - } - - @Override - public ResourceSet createProject(String workspaceName, String projectName, UserScope scope) { - return null; - } - - @Override - public ResourceSet updateProject(String projectName, Manifest manifest, Metadata metadata, - UserScope scope) { - return null; + return adapter.encode(resource, geometry); + } + + @Override + public KimObservationStrategyDocument resolveDataflow(String urn, Scope scope) { + // TODO Auto-generated method stub + return null; + } + + @Override + public List dependents(String namespaceId) { + return null; + } + + @Override + public List precursors(String namespaceId) { + return null; + } + + /** + * TODO improve logics: the main function should return the appropriate ProjectStorage for the URL + * in all cases. Then call importProject (storage) when all the different storages are + * implemented. + * + *

TODO add scope so we can record the owner/importer in the project rights + * + * @param workspaceName + * @param projectUrl can be a file (zip, jar, existing folder, or anything supported by + * extensions), a git URL (with a potential branch name after a # sign) or a http URL from + * another resource manager. Could also be a service URL for mirroring. + * @param overwriteIfExisting self-explanatory. If the project is remote, reload if true. + * @return + * @deprecated use project import schema + register resource + */ + // @Override + public synchronized List importProject( + String workspaceName, String projectUrl, boolean overwriteIfExisting, UserScope scope) { + + var storage = workspaceManager.importProject(projectUrl, workspaceName); + if (storage == null) { + return List.of( + Utils.Resources.createEmpty( + Notification.create("Import failed for " + projectUrl, Notification.Level.Error))); } + var project = workspaceManager.loadProject(storage, workspaceName); - @Override - public List createDocument(String projectName, String documentUrn, - ProjectStorage.ResourceType documentType, UserScope scope) { - return this.workspaceManager.createDocument(projectName, documentType, documentUrn, scope); + // initial resource permissions + var status = new ResourceStatus(); + if (scope.getIdentity() instanceof UserIdentity user) { + status.getPrivileges().getAllowedUsers().add(user.getUsername()); + status.setOwner(user.getUsername()); } - - @Override - public List updateDocument(String projectName, ProjectStorage.ResourceType documentType, - String content, UserScope scope) { - return this.workspaceManager.updateDocument(projectName, documentType, content, scope); + status.setFileLocation(storage instanceof FileProjectStorage fps ? fps.getRootFolder() : null); + status.setKnowledgeClass(KnowledgeClass.PROJECT); + status.setReviewStatus(0); + status.setType(ResourceStatus.Type.AVAILABLE); + status.setLegacy(false); + catalog.put(project.getUrn(), status); + db.commit(); + + return collectProject(project.getUrn(), CRUDOperation.CREATE, workspaceName, scope); + } + + @Override + public ResourceSet createProject(String workspaceName, String projectName, UserScope scope) { + return null; + } + + @Override + public ResourceSet updateProject( + String projectName, Manifest manifest, Metadata metadata, UserScope scope) { + return null; + } + + @Override + public List createDocument( + String projectName, + String documentUrn, + ProjectStorage.ResourceType documentType, + UserScope scope) { + return this.workspaceManager.createDocument(projectName, documentType, documentUrn, scope); + } + + @Override + public List updateDocument( + String projectName, + ProjectStorage.ResourceType documentType, + String content, + UserScope scope) { + return this.workspaceManager.updateDocument(projectName, documentType, content, scope); + } + + @Override + public List deleteProject(String projectName, UserScope scope) { + + updateLock.writeLock().lock(); + // + // try { + // // remove namespaces, behaviors and resources + // var project = localProjects.get(projectName); + // if (project != null) { + // for (var namespace : project.getNamespaces()) { + // this.localNamespaces.remove(namespace.getUrn()); + // } + // for (var ontology : project.getOntologies()) { + // this.servedOntologies.remove(ontology.getUrn()); + // } + // for (KActorsBehavior behavior : project.getBehaviors()) { + // this.localBehaviors.remove(behavior.getUrn()); + // } + // for (String resource : project.getResourceUrns()) { + // 
localResources.remove(resource); + // catalog.remove(resource); + // } + // this.localProjects.remove(projectName); + // } + workspaceManager.removeProject(projectName); + db.commit(); + + // }/* finally {*/ + updateLock.writeLock().unlock(); + /*}*/ + + return null; + } + + @Override + public List deleteWorkspace(String workspaceName, UserScope scope) { + Workspace workspace = workspaceManager.getWorkspace(workspaceName); + for (Project project : workspace.getProjects()) { + deleteProject(project.getUrn(), scope); } - - @Override - public List deleteProject(String projectName, UserScope scope) { - - updateLock.writeLock().lock(); - // - // try { - // // remove namespaces, behaviors and resources - // var project = localProjects.get(projectName); - // if (project != null) { - // for (var namespace : project.getNamespaces()) { - // this.localNamespaces.remove(namespace.getUrn()); - // } - // for (var ontology : project.getOntologies()) { - // this.servedOntologies.remove(ontology.getUrn()); - // } - // for (KActorsBehavior behavior : project.getBehaviors()) { - // this.localBehaviors.remove(behavior.getUrn()); - // } - // for (String resource : project.getResourceUrns()) { - // localResources.remove(resource); - // catalog.remove(resource); - // } - // this.localProjects.remove(projectName); - // } - workspaceManager.removeProject(projectName); - db.commit(); - - // }/* finally {*/ - updateLock.writeLock().unlock(); - /*}*/ - - return null; + // try { + // updateLock.writeLock().lock(); + //// this.localWorkspaces.remove(workspaceName); + // } finally { + // updateLock.writeLock().unlock(); + // }\ + return null; + } + + @Override + public Collection listWorkspaces() { + return this.workspaceManager.getWorkspaces(); + } + + @Override + public boolean shutdown() { + return shutdown(30); + } + + @Override + public boolean scopesAreReactive() { + return false; + } + + public boolean shutdown(int secondsToWait) { + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceUnavailable, + capabilities(serviceScope())); + + // try { + // projectLoader.awaitTermination(secondsToWait, TimeUnit.SECONDS); + return super.shutdown(); + // } catch (InterruptedException e) { + // Logging.INSTANCE.error("Error during thread termination", e); + // } + // return false; + } + + @Override + public Capabilities capabilities(Scope scope) { + + var ret = new ResourcesCapabilitiesImpl(); + ret.setWorldviewProvider(workspaceManager.isWorldviewProvider()); + ret.setAdoptedWorldview(workspaceManager.getAdoptedWorldview()); + ret.setWorkspaceNames(workspaceManager.getWorkspaceURNs()); + ret.setType(Type.RESOURCES); + ret.setServiceName("Resources"); + ret.setServerId(hardwareSignature == null ? null : ("RESOURCES_" + hardwareSignature)); + ret.setServiceId(workspaceManager.getConfiguration().getServiceId()); + ret.getServiceNotifications().addAll(serviceNotifications()); + // TODO capabilities are being asked from same machine as the one that runs the server. This + // call + // should have a @Nullable scope. The condition here is silly. + ret.getPermissions().add(CRUDOperation.CREATE); + ret.getPermissions().add(CRUDOperation.DELETE); + ret.getPermissions().add(CRUDOperation.UPDATE); + ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); + ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); + ret.setBrokerURI( + embeddedBroker != null + ? 
embeddedBroker.getURI() + : workspaceManager.getConfiguration().getBrokerURI()); + ret.setAvailableMessagingQueues( + Utils.URLs.isLocalHost(getUrl()) + ? EnumSet.of( + Message.Queue.Info, + Message.Queue.Errors, + Message.Queue.Warnings, + Message.Queue.Events) + : EnumSet.noneOf(Message.Queue.class)); + + return ret; + } + + @Override + public String serviceId() { + return workspaceManager.getConfiguration().getServiceId(); + } + + @Override + public KimObservable resolveObservable(String definition) { + var parsed = this.workspaceManager.resolveObservable(removeExcessParentheses(definition)); + if (parsed != null) { + boolean errors = false; + for (var notification : parsed.getNotifications()) { + if (notification.message().level() == LanguageValidationScope.Level.ERROR) { + errors = true; + scope.error(notification.message().message()); + } else if (notification.message().level() == LanguageValidationScope.Level.WARNING) { + scope.error(notification.message().message()); + } + } + return errors ? null : LanguageAdapter.INSTANCE.adaptObservable(parsed, null, null, null); } - - - @Override - public List deleteWorkspace(String workspaceName, UserScope scope) { - Workspace workspace = workspaceManager.getWorkspace(workspaceName); - for (Project project : workspace.getProjects()) { - deleteProject(project.getUrn(), scope); + return null; + } + + @Override + public KimConcept.Descriptor describeConcept(String conceptUrn) { + return workspaceManager.describeConcept(conceptUrn); + } + + @Override + public KimConcept resolveConcept(String definition) { + var parsed = this.workspaceManager.resolveConcept(removeExcessParentheses(definition)); + if (parsed != null) { + boolean errors = false; + for (var notification : parsed.getNotifications()) { + if (notification.message().level() == LanguageValidationScope.Level.ERROR) { + errors = true; + scope.error(notification.message().message()); + } else if (notification.message().level() == LanguageValidationScope.Level.WARNING) { + scope.error(notification.message().message()); } - // try { - // updateLock.writeLock().lock(); - //// this.localWorkspaces.remove(workspaceName); - // } finally { - // updateLock.writeLock().unlock(); - // }\ - return null; + } + return errors ? 
null : LanguageAdapter.INSTANCE.adaptSemantics(parsed, null, null, null); } + return null; + } - @Override - public Collection listWorkspaces() { - return this.workspaceManager.getWorkspaces(); + private String removeExcessParentheses(String definition) { + definition = definition.trim(); + while (definition.startsWith("(") && definition.endsWith(")")) { + definition = definition.substring(1, definition.length() - 1); } + return definition; + } - @Override - public boolean shutdown() { - return shutdown(30); - } + @Override + public List projects(Collection projects, Scope scope) { - @Override - public boolean scopesAreReactive() { - return false; - } + ResourceSet ret = new ResourceSet(); - public boolean shutdown(int secondsToWait) { + // TODO + // for (String projectName : this.configuration.getProjectConfiguration().keySet()) { + // if (projects.contains(projectName)) { + //// if (!localProjects.containsKey(projectName)) { + //// importProject(projectName, this.configuration.getProjectConfiguration() + // .get + // (projectName)); + //// } + // ret = Utils.Resources.merge(ret, collectProject(projectName, scope)); + // } + // } - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceUnavailable, - capabilities(serviceScope())); + return List.of(); // sort(ret, scope); + } - // try { - // projectLoader.awaitTermination(secondsToWait, TimeUnit.SECONDS); - return super.shutdown(); - // } catch (InterruptedException e) { - // Logging.INSTANCE.error("Error during thread termination", e); - // } - // return false; - } + private ResourceSet sort(ResourceSet ret, Scope scope) { - @Override - public Capabilities capabilities(Scope scope) { - - var ret = new ResourcesCapabilitiesImpl(); - ret.setWorldviewProvider(workspaceManager.isWorldviewProvider()); - ret.setAdoptedWorldview(workspaceManager.getAdoptedWorldview()); - ret.setWorkspaceNames(workspaceManager.getWorkspaceURNs()); - ret.setType(Type.RESOURCES); - ret.setServiceName("Resources"); - ret.setServerId(hardwareSignature == null ? null : ("RESOURCES_" + hardwareSignature)); - ret.setServiceId(workspaceManager.getConfiguration().getServiceId()); - ret.getServiceNotifications().addAll(serviceNotifications()); - // TODO capabilities are being asked from same machine as the one that runs the server. This call - // should have a @Nullable scope. The condition here is silly. - ret.getPermissions().add(CRUDOperation.CREATE); - ret.getPermissions().add(CRUDOperation.DELETE); - ret.getPermissions().add(CRUDOperation.UPDATE); - ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); - ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); - ret.setBrokerURI( - embeddedBroker != null ? embeddedBroker.getURI() : - workspaceManager.getConfiguration().getBrokerURI()); - ret.setAvailableMessagingQueues( - Utils.URLs.isLocalHost(getUrl()) ? 
EnumSet.of(Message.Queue.Info, Message.Queue.Errors, - Message.Queue.Warnings, - Message.Queue.Events) : EnumSet.noneOf( - Message.Queue.class)); + Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); - return ret; - } + for (ResourceSet.Resource ns : ret.getNamespaces()) { - @Override - public String serviceId() { - return workspaceManager.getConfiguration().getServiceId(); - } + // TODO use a recursive function to capture n-th level deps that aren't resolved + // directly, although this doesn't apply if we have the whole workspace - @Override - public KimObservable resolveObservable(String definition) { - var parsed = this.workspaceManager.resolveObservable(removeExcessParentheses(definition)); - if (parsed != null) { - boolean errors = false; - for (var notification : parsed.getNotifications()) { - if (notification.message().level() == LanguageValidationScope.Level.ERROR) { - errors = true; - scope.error(notification.message().message()); - } else if (notification.message().level() == LanguageValidationScope.Level.WARNING) { - scope.error(notification.message().message()); - } - } - return errors ? null : LanguageAdapter.INSTANCE.adaptObservable(parsed, null, null, null); + graph.addVertex(ns.getResourceUrn()); + KimNamespace namespace = resolveNamespace(ns.getResourceUrn(), scope); + if (namespace == null) { + ret.setEmpty(true); + return ret; + } + for (String imp : namespace.getImports().keySet()) { + KimNamespace imported = resolveNamespace(imp, scope); + if (imported == null) { + ret.setEmpty(true); + return ret; } - return null; + graph.addVertex(imported.getUrn()); + if (imported.getUrn().equals(namespace.getUrn())) { + System.out.println("DIO ZAPPA"); + } + graph.addEdge(imported.getUrn(), namespace.getUrn()); + } } - @Override - public KimConcept.Descriptor describeConcept(String conceptUrn) { - return workspaceManager.describeConcept(conceptUrn); + TopologicalOrderIterator order = new TopologicalOrderIterator<>(graph); + Map toSort = new HashMap<>(); + ret.getNamespaces().forEach((ns) -> toSort.put(ns.getResourceUrn(), ns)); + ret.getNamespaces().clear(); + while (order.hasNext()) { + ret.getNamespaces().add(toSort.get(order.next())); } - @Override - public KimConcept resolveConcept(String definition) { - var parsed = this.workspaceManager.resolveConcept(removeExcessParentheses(definition)); - if (parsed != null) { - boolean errors = false; - for (var notification : parsed.getNotifications()) { - if (notification.message().level() == LanguageValidationScope.Level.ERROR) { - errors = true; - scope.error(notification.message().message()); - } else if (notification.message().level() == LanguageValidationScope.Level.WARNING) { - scope.error(notification.message().message()); - } - } - return errors ? null : LanguageAdapter.INSTANCE.adaptSemantics(parsed, null, null, null); - } - return null; + return ret; + } + + /** + * Collect all known project data, fulfilling any missing external dependencies but not sorting + * the results by dependency as this could be one step in a multiple-project setup. If external + * dependencies are needed and unsatisfied, return an empty resourceset. 
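// Editor's note — standalone sketch, not part of this patch. The sort(...) helper above orders a
// ResourceSet's namespaces so that imported namespaces precede their importers, by running a
// JGraphT topological order over the import graph. The namespace URNs below are hypothetical; the
// real method derives the edges from KimNamespace.getImports().
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

class NamespaceOrderSketch {
  public static void main(String[] args) {
    Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);
    graph.addVertex("demo.core");
    graph.addVertex("demo.hydrology");
    // edges run from the imported namespace to the importer, so dependencies are emitted first
    graph.addEdge("demo.core", "demo.hydrology");

    var order = new TopologicalOrderIterator<>(graph);
    while (order.hasNext()) {
      System.out.println(order.next()); // demo.core, then demo.hydrology
    }
  }
}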
+ * + * @param projectName + * @param scope + * @return + */ + private List collectProject( + String projectName, CRUDOperation operation, String workspace, Scope scope) { + + List ret = new ArrayList<>(); + + List ontologies = + this.workspaceManager.getOntologies(false).stream() + .filter(o -> projectName.equals(o.getProjectName())) + .toList(); + List namespaces = + this.workspaceManager.getNamespaces().stream() + .filter(o -> projectName.equals(o.getProjectName())) + .toList(); + List strategies = + this.workspaceManager.getStrategyDocuments().stream() + .filter(o -> projectName.equals(o.getProjectName())) + .toList(); + List behaviors = + this.workspaceManager.getBehaviors().stream() + .filter(o -> projectName.equals(o.getProjectName())) + .toList(); + + // Resources work independently and do not come with the project data. + + // check if the worldview is impacted, too + var worldviewOntologies = + getWorldview().getOntologies().stream().map(KlabAsset::getUrn).collect(Collectors.toSet()); + var worldviewStrategies = + getWorldview().getObservationStrategies().stream() + .map(KlabAsset::getUrn) + .collect(Collectors.toSet()); + + var conts = + Sets.intersection( + worldviewOntologies, + ontologies.stream().map(KlabAsset::getUrn).collect(Collectors.toSet())); + var cstra = + Sets.intersection( + worldviewStrategies, + strategies.stream().map(KlabAsset::getUrn).collect(Collectors.toSet())); + + if (!conts.isEmpty() || !cstra.isEmpty()) { + ret.add( + Utils.Resources.create( + this, + Worldview.WORLDVIEW_WORKSPACE_IDENTIFIER, + operation, + Utils.Collections.shallowCollection( + ontologies.stream().filter(o -> conts.contains(o.getUrn())).toList(), + strategies.stream().filter(o -> conts.contains(o.getUrn())).toList()) + .toArray(new KlabAsset[0]))); } - private String removeExcessParentheses(String definition) { - definition = definition.trim(); - while (definition.startsWith("(") && definition.endsWith(")")) { - definition = definition.substring(1, definition.length() - 1); - } - return definition; + ret.add( + Utils.Resources.create( + this, + workspace, + operation, + Utils.Collections.shallowCollection(ontologies, strategies, namespaces, behaviors) + .toArray(new KlabAsset[0]))); + + return ret; + } + + @Override + public ResourceSet model(String modelName, Scope scope) { + // TODO Auto-generated method stub + return null; + } + + @Override + public List manageRepository( + String projectName, RepositoryState.Operation operation, String... 
arguments) { + return workspaceManager.manageRepository(projectName, operation, arguments); + } + + // @Override + // public ResourceSet createResource(Resource resource, UserScope scope) { + // // TODO Auto-generated method stub + // return null; + // } + // + // @Override + // public ResourceSet createResource(Dataflow dataflow, UserScope scope) { + // return null; + // } + + // @Override + @Deprecated // remove when the import mechanism can do this + public ResourceSet createResource(File resourcePath, UserScope scope) { + + KnowledgeClass knowledgeClass = null; + File sourceFile = null; + String urn = null; + ResourceSet ret = null; + + if ("jar".equals(Utils.Files.getFileExtension(resourcePath))) { + var imported = getComponentRegistry().installComponent(resourcePath, null, scope); + knowledgeClass = KnowledgeClass.COMPONENT; + sourceFile = imported.getFirst().sourceArchive(); + urn = imported.getFirst().id(); + ret = imported.getSecond(); + } else { + // TODO resource, mirror archive } - @Override - public List projects(Collection projects, Scope scope) { - - ResourceSet ret = new ResourceSet(); - - // TODO - // for (String projectName : this.configuration.getProjectConfiguration().keySet()) { - // if (projects.contains(projectName)) { - //// if (!localProjects.containsKey(projectName)) { - //// importProject(projectName, this.configuration.getProjectConfiguration() - // .get - // (projectName)); - //// } - // ret = Utils.Resources.merge(ret, collectProject(projectName, scope)); - // } - // } - - return List.of(); // sort(ret, scope); + if (urn != null) { + // initial resource permissions + var status = new ResourceStatus(); + if (scope.getIdentity() instanceof UserIdentity user) { + status.getPrivileges().getAllowedUsers().add(user.getUsername()); + status.setOwner(user.getUsername()); + } + status.setFileLocation(sourceFile); + status.setKnowledgeClass(knowledgeClass); + status.setReviewStatus(0); + status.setType(ResourceStatus.Type.AVAILABLE); + status.setLegacy(false); + catalog.put(urn, status); + db.commit(); } - private ResourceSet sort(ResourceSet ret, Scope scope) { - - Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); - - for (ResourceSet.Resource ns : ret.getNamespaces()) { - - // TODO use a recursive function to capture n-th level deps that aren't resolved - // directly, although this doesn't apply if we have the whole workspace - - graph.addVertex(ns.getResourceUrn()); - KimNamespace namespace = resolveNamespace(ns.getResourceUrn(), scope); - if (namespace == null) { - ret.setEmpty(true); - return ret; - } - for (String imp : namespace.getImports().keySet()) { - KimNamespace imported = resolveNamespace(imp, scope); - if (imported == null) { - ret.setEmpty(true); - return ret; - } - graph.addVertex(imported.getUrn()); - if (imported.getUrn().equals(namespace.getUrn())) { - System.out.println("DIO ZAPPA"); - } - graph.addEdge(imported.getUrn(), namespace.getUrn()); - } - } - - TopologicalOrderIterator order = new TopologicalOrderIterator<>(graph); - Map toSort = new HashMap<>(); - ret.getNamespaces().forEach((ns) -> toSort.put(ns.getResourceUrn(), ns)); - ret.getNamespaces().clear(); - while (order.hasNext()) { - ret.getNamespaces().add(toSort.get(order.next())); - } - - return ret; + return ret; + } + + // @Override + // public Resource createResource(String projectName, String urnId, String adapter, + // Parameters resourceData, UserScope scope) { + // return null; + // } + + @Override + public ResourceStatus registerResource( + String urn, KnowledgeClass 
knowledgeClass, File fileLocation, Scope submittingScope) { + + if (urn != null) { + // initial resource permissions + var status = new ResourceStatus(); + if (scope.getIdentity() instanceof UserIdentity user) { + status.getPrivileges().getAllowedUsers().add(user.getUsername()); + status.setOwner(user.getUsername()); + } + status.setFileLocation(fileLocation); + status.setKnowledgeClass(knowledgeClass); + status.setReviewStatus(0); + status.setType(ResourceStatus.Type.AVAILABLE); + status.setLegacy(false); + catalog.put(urn, status); + db.commit(); + return status; } - /** - * Collect all known project data, fulfilling any missing external dependencies but not sorting the - * results by dependency as this could be one step in a multiple-project setup. If external dependencies - * are needed and unsatisfied, return an empty resourceset. - * - * @param projectName - * @param scope - * @return - */ - private List collectProject(String projectName, CRUDOperation operation, String workspace, - Scope scope) { - - List ret = new ArrayList<>(); - - List ontologies = this.workspaceManager.getOntologies(false).stream().filter( - o -> projectName.equals(o.getProjectName())).toList(); - List namespaces = this.workspaceManager.getNamespaces().stream().filter( - o -> projectName.equals(o.getProjectName())).toList(); - List strategies = - this.workspaceManager.getStrategyDocuments().stream().filter( - o -> projectName.equals(o.getProjectName())).toList(); - List behaviors = this.workspaceManager.getBehaviors().stream().filter( - o -> projectName.equals(o.getProjectName())).toList(); - - // Resources work independently and do not come with the project data. - - // check if the worldview is impacted, too - var worldviewOntologies = getWorldview().getOntologies().stream().map(KlabAsset::getUrn).collect( - Collectors.toSet()); - var worldviewStrategies = getWorldview().getObservationStrategies().stream().map( - KlabAsset::getUrn).collect(Collectors.toSet()); - - var conts = Sets.intersection(worldviewOntologies, - ontologies.stream().map(KlabAsset::getUrn).collect(Collectors.toSet())); - var cstra = Sets.intersection(worldviewStrategies, - strategies.stream().map(KlabAsset::getUrn).collect(Collectors.toSet())); - - if (!conts.isEmpty() || !cstra.isEmpty()) { - ret.add(Utils.Resources.create(this, Worldview.WORLDVIEW_WORKSPACE_IDENTIFIER, operation, - Utils.Collections.shallowCollection(ontologies.stream().filter( - o -> conts.contains(o.getUrn())).toList(), - strategies.stream().filter( - o -> conts.contains( - o.getUrn())).toList()).toArray( - new KlabAsset[0]))); - } + return ResourceStatus.offline(); + } - ret.add(Utils.Resources.create(this, workspace, operation, - Utils.Collections.shallowCollection(ontologies, strategies, namespaces, - behaviors).toArray( - new KlabAsset[0]))); + @Override + public List deleteDocument(String projectName, String assetUrn, UserScope scope) { + return null; + } - return ret; - } + // public void setLocalName(String localName) { + // this.localName = localName; + // } - @Override - public ResourceSet model(String modelName, Scope scope) { - // TODO Auto-generated method stub - return null; - } + @Override + public ResourceSet queryModels(Observable observable, ContextScope scope) { - @Override - public List manageRepository(String projectName, RepositoryState.Operation operation, - String... 
arguments) { - return workspaceManager.manageRepository(projectName, operation, arguments); + if (!semanticSearchAvailable) { + Logging.INSTANCE.warn( + "Semantic search is not available: client should not make this request"); + return ResourceSet.empty(); } - // @Override - // public ResourceSet createResource(Resource resource, UserScope scope) { - // // TODO Auto-generated method stub - // return null; - // } - // - // @Override - // public ResourceSet createResource(Dataflow dataflow, UserScope scope) { - // return null; - // } - - // @Override - @Deprecated // remove when the import mechanism can do this - public ResourceSet createResource(File resourcePath, UserScope scope) { - - KnowledgeClass knowledgeClass = null; - File sourceFile = null; - String urn = null; - ResourceSet ret = null; - - if ("jar".equals(Utils.Files.getFileExtension(resourcePath))) { - var imported = getComponentRegistry().installComponent(resourcePath, null, scope); - knowledgeClass = KnowledgeClass.COMPONENT; - sourceFile = imported.getFirst().sourceArchive(); - urn = imported.getFirst().id(); - ret = imported.getSecond(); - } else { - // TODO resource, mirror archive - } - - if (urn != null) { - // initial resource permissions - var status = new ResourceStatus(); - if (scope.getIdentity() instanceof UserIdentity user) { - status.getPrivileges().getAllowedUsers().add(user.getUsername()); - status.setOwner(user.getUsername()); - } - status.setFileLocation(sourceFile); - status.setKnowledgeClass(knowledgeClass); - status.setReviewStatus(0); - status.setType(ResourceStatus.Type.AVAILABLE); - status.setLegacy(false); - catalog.put(urn, status); - db.commit(); - } - - return ret; + ResourceSet results = new ResourceSet(); + // FIXME use the observation's scale (pass the observation) + for (ModelReference model : this.kbox.query(observable, scope)) { + results + .getResults() + .add( + new ResourceSet.Resource( + getUrl().toString(), + model.getName(), + model.getProjectUrn(), + model.getVersion(), + KnowledgeClass.MODEL)); } - // @Override - // public Resource createResource(String projectName, String urnId, String adapter, - // Parameters resourceData, UserScope scope) { - // return null; - // } - - @Override - public ResourceStatus registerResource(String urn, KnowledgeClass knowledgeClass, File fileLocation, - Scope submittingScope) { - - if (urn != null) { - // initial resource permissions - var status = new ResourceStatus(); - if (scope.getIdentity() instanceof UserIdentity user) { - status.getPrivileges().getAllowedUsers().add(user.getUsername()); - status.setOwner(user.getUsername()); - } - status.setFileLocation(fileLocation); - status.setKnowledgeClass(knowledgeClass); - status.setReviewStatus(0); - status.setType(ResourceStatus.Type.AVAILABLE); - status.setLegacy(false); - catalog.put(urn, status); - db.commit(); - return status; - } - - return ResourceStatus.offline(); + addDependencies(results, scope); + + return results; + } + + /** + * The workspace manager calls the kbox directly + * + * @return + */ + public ModelKbox modelKbox() { + return this.kbox; + } + + /** + * Add a collection of namespaces to a result set, including their dependencies and listing the + * correspondent resources in dependency order. If any namespace isn't available, return false; + * + *
TODO/FIXME involve other services from the scope if a namespace is not available locally. + * + * @param namespaces + * @param results + */ + private boolean addNamespacesToResultSet( + Set namespaces, ResourceSet results, Scope scope) { + + DefaultDirectedGraph nss = new DefaultDirectedGraph<>(DefaultEdge.class); + Map storage = new HashMap<>(); + for (String ns : namespaces) { + if (!addNamespaceToResultSet(ns, nss, storage, scope)) { + return false; + } } - @Override - public List deleteDocument(String projectName, String assetUrn, UserScope scope) { - return null; + TopologicalOrderIterator order = new TopologicalOrderIterator<>(nss); + while (order.hasNext()) { + results.getNamespaces().add(storage.get(order.next())); } - // public void setLocalName(String localName) { - // this.localName = localName; - // } - - - @Override - public ResourceSet queryModels(Observable observable, ContextScope scope) { - - if (!semanticSearchAvailable) { - Logging.INSTANCE.warn("Semantic search is not available: client should not make this request"); - return ResourceSet.empty(); - } - - ResourceSet results = new ResourceSet(); - // FIXME use the observation's scale (pass the observation) - for (ModelReference model : this.kbox.query(observable, scope)) { - results.getResults().add( - new ResourceSet.Resource(getUrl().toString(), model.getName(), model.getProjectUrn(), - model.getVersion(), KnowledgeClass.MODEL)); - } + return true; + } - addDependencies(results, scope); + private boolean addNamespaceToResultSet( + String ns, + DefaultDirectedGraph nss, + Map storage, + Scope scope) { - return results; + if (nss.containsVertex(ns)) { + return true; } - /** - * The workspace manager calls the kbox directly - * - * @return - */ - public ModelKbox modelKbox() { - return this.kbox; + KimNamespace namespace = resolveNamespace(ns, scope); + if (namespace == null) { + // TODO use services in scope + return false; } - /** - * Add a collection of namespaces to a result set, including their dependencies and listing the - * correspondent resources in dependency order. If any namespace isn't available, return false; - *
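// --------------------------------------------------------------------------------------------
// Editor's sketch, not part of this patch: the dependency ordering that addNamespacesToResultSet
// builds above, isolated with the same JGraphT primitives. The class and method names here are
// made up for illustration, `imports` stands in for the keys of KimNamespace#getImports(), and
// the import graph is assumed to be acyclic (TopologicalOrderIterator fails on cycles).
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

final class NamespaceOrderSketch {

  /** Returns the namespaces so that every imported namespace precedes its importers. */
  static List<String> dependencyOrder(Map<String, Set<String>> imports) {
    var graph = new DefaultDirectedGraph<String, DefaultEdge>(DefaultEdge.class);
    imports.keySet().forEach(graph::addVertex);
    imports.forEach(
        (ns, deps) ->
            deps.forEach(
                dep -> {
                  graph.addVertex(dep); // an import need not be among the requested namespaces
                  graph.addEdge(dep, ns); // edge points from dependency to dependent
                }));
    var ordered = new ArrayList<String>();
    new TopologicalOrderIterator<>(graph).forEachRemaining(ordered::add);
    return ordered;
  }
}
// --------------------------------------------------------------------------------------------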
- * TODO/FIXME involve other services from the scope if a namespace is not - * available locally. - * - * @param namespaces - * @param results - */ - private boolean addNamespacesToResultSet(Set namespaces, ResourceSet results, Scope scope) { - - DefaultDirectedGraph nss = new DefaultDirectedGraph<>(DefaultEdge.class); - Map storage = new HashMap<>(); - for (String ns : namespaces) { - if (!addNamespaceToResultSet(ns, nss, storage, scope)) { - return false; - } - } - - TopologicalOrderIterator order = new TopologicalOrderIterator<>(nss); - while (order.hasNext()) { - results.getNamespaces().add(storage.get(order.next())); - } + nss.addVertex(ns); - return true; + var dependency = namespace.getImports(); + for (String dependent : dependency.keySet()) { + if (!nss.containsVertex(dependent)) { + addNamespaceToResultSet(dependent, nss, storage, scope); + } + nss.addEdge(dependent, ns); } - private boolean addNamespaceToResultSet(String ns, DefaultDirectedGraph nss, - Map storage, Scope scope) { - - if (nss.containsVertex(ns)) { - return true; - } - - KimNamespace namespace = resolveNamespace(ns, scope); - if (namespace == null) { - // TODO use services in scope - return false; - } - - nss.addVertex(ns); - - var dependency = namespace.getImports(); - for (String dependent : dependency.keySet()) { - if (!nss.containsVertex(dependent)) { - addNamespaceToResultSet(dependent, nss, storage, scope); - } - nss.addEdge(dependent, ns); - } - - var resource = new ResourceSet.Resource(); - resource.setKnowledgeClass(KnowledgeClass.NAMESPACE); - resource.setResourceUrn(ns); - resource.setResourceVersion(namespace.getVersion()); - resource.setServiceId(serviceId()); - storage.put(ns, resource); - - return true; + var resource = new ResourceSet.Resource(); + resource.setKnowledgeClass(KnowledgeClass.NAMESPACE); + resource.setResourceUrn(ns); + resource.setResourceVersion(namespace.getVersion()); + resource.setServiceId(serviceId()); + storage.put(ns, resource); + + return true; + } + + @Override + public List queryResources(String urnPattern, KnowledgeClass... resourceTypes) { + + List ret = new ArrayList<>(); + Set wanted = EnumSet.noneOf(KnowledgeClass.class); + if (resourceTypes != null && resourceTypes.length > 0) { + wanted.addAll(Arrays.asList(resourceTypes)); + } else { + // we want them all + wanted.addAll(Arrays.asList(KnowledgeClass.values())); } - @Override - public List queryResources(String urnPattern, KnowledgeClass... 
resourceTypes) { - - List ret = new ArrayList<>(); - Set wanted = EnumSet.noneOf(KnowledgeClass.class); - if (resourceTypes != null && resourceTypes.length > 0) { - wanted.addAll(Arrays.asList(resourceTypes)); - } else { - // we want them all - wanted.addAll(Arrays.asList(KnowledgeClass.values())); - } - - if (wanted.contains(KnowledgeClass.RESOURCE)) { - - } - if (wanted.contains(KnowledgeClass.MODEL)) { + if (wanted.contains(KnowledgeClass.RESOURCE)) {} - } - if (wanted.contains(KnowledgeClass.SCRIPT)) { + if (wanted.contains(KnowledgeClass.MODEL)) {} - } - if (wanted.contains(KnowledgeClass.APPLICATION)) { + if (wanted.contains(KnowledgeClass.SCRIPT)) {} - } - if (wanted.contains(KnowledgeClass.BEHAVIOR)) { + if (wanted.contains(KnowledgeClass.APPLICATION)) {} - } - if (wanted.contains(KnowledgeClass.COMPONENT)) { + if (wanted.contains(KnowledgeClass.BEHAVIOR)) {} - } - if (wanted.contains(KnowledgeClass.NAMESPACE)) { + if (wanted.contains(KnowledgeClass.COMPONENT)) {} - } - if (wanted.contains(KnowledgeClass.PROJECT)) { + if (wanted.contains(KnowledgeClass.NAMESPACE)) {} - } - // if (wanted.contains(KnowledgeClass.INSTANCE)) { - // - // } + if (wanted.contains(KnowledgeClass.PROJECT)) {} - return ret; - } - - @Override - public ResourceStatus resourceStatus(String urn, Scope scope) { - ResourceStatus ret = catalog.get(urn); - if (ret != null && (ret.getType().isUsable())) { - /* - * TODO check the resource status at this time and in this scope - */ - } - return ret; - } - - @Override - public Project resolveProject(String projectName, Scope scope) { - // TODO check scope - return workspaceManager.getProject(projectName); - } - - @Override - public Coverage modelGeometry(String modelUrn) throws KlabIllegalArgumentException { - // TODO Auto-generated method stub - return null; + // if (wanted.contains(KnowledgeClass.INSTANCE)) { + // + // } + + return ret; + } + + @Override + public ResourceStatus resourceStatus(String urn, Scope scope) { + ResourceStatus ret = catalog.get(urn); + if (ret != null && (ret.getType().isUsable())) { + /* + * TODO check the resource status at this time and in this scope + */ } - - @Override - public KActorsBehavior readBehavior(URL url) { - return null; - // return KActorsAdapter.INSTANCE.readBehavior(url); + return ret; + } + + @Override + public Project resolveProject(String projectName, Scope scope) { + // TODO check scope + return workspaceManager.getProject(projectName); + } + + @Override + public Coverage modelGeometry(String modelUrn) throws KlabIllegalArgumentException { + // TODO Auto-generated method stub + return null; + } + + @Override + public KActorsBehavior readBehavior(URL url) { + return null; + // return KActorsAdapter.INSTANCE.readBehavior(url); + } + + @Override + public Collection listProjects(Scope scope) { + // FIXME filter by scope access + return workspaceManager.getProjects(); + } + + @Override + public Collection listResourceUrns(Scope scope) { + return localResources; + } + + @Override + public ResourcePrivileges getRights(String resourceUrn, Scope scope) { + + var status = catalog.get(resourceUrn); + if (status != null) { + return status.getPrivileges().asSeenByScope(scope); } - - @Override - public Collection listProjects(Scope scope) { - // FIXME filter by scope access - return workspaceManager.getProjects(); + return ResourcePrivileges.empty(); + } + + @Override + public boolean setRights(String resourceUrn, ResourcePrivileges resourcePrivileges, Scope scope) { + var status = catalog.get(resourceUrn); + if (status != null) { + 
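      // Editor's note: the mutated status is written back and the transaction committed
      // explicitly; assuming the catalog is a MapDB-style persistent map, mutating the value
      // in place would not be durable on its own. registerResource() and the deprecated
      // createResource() above follow the same put-then-commit pattern.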
status.setPrivileges(resourcePrivileges); + catalog.put(resourceUrn, status); + db.commit(); + return true; } + return false; + } + + @Override + public URL lockProject(String urn, UserScope scope) { + String token = scope.getIdentity().getId(); + boolean local = + scope instanceof ServiceScope + || (scope instanceof ServiceUserScope userScope && userScope.isLocal()); + return workspaceManager.lockProject(urn, token, local); + } + + @Override + public boolean unlockProject(String urn, UserScope scope) { + String token = scope.getIdentity().getId(); + return workspaceManager.unlockProject(urn, token); + } + + @Override + public ResourceSet resolve(String urn, Scope scope) { + + ResourceSet ret = new ResourceSet(); + + switch (Urn.classify(urn)) { + case RESOURCE -> {} + case KIM_OBJECT -> { + + /** TODO may be a project or even a workspace */ + KimNamespace namespace = resolveNamespace(urn, scope); + if (namespace != null) { + + ret.getResults() + .add( + new ResourceSet.Resource( + getUrl().toString(), + urn, + namespace.getProjectName(), + namespace.getVersion(), + KnowledgeClass.NAMESPACE)); - @Override - public Collection listResourceUrns(Scope scope) { - return localResources; - } - - @Override - public ResourcePrivileges getRights(String resourceUrn, Scope scope) { + } else { - var status = catalog.get(resourceUrn); - if (status != null) { - return status.getPrivileges().asSeenByScope(scope); + /* + * extract namespace and check for that. + */ + String ns = Utils.Paths.getLeading(urn, '.'); + String nm = Utils.Paths.getLast(urn, '.'); + namespace = resolveNamespace(ns, scope); + /* + * TODO check permissions! + */ + if (namespace != null) { + for (KlabStatement statement : namespace.getStatements()) { + if (urn.equals(statement.getUrn())) { + ret.getResults() + .add( + new ResourceSet.Resource( + getUrl().toString(), + urn, + namespace.getProjectName(), + namespace.getVersion(), + KlabAsset.classify(statement))); + break; + } + } + } } - return ResourcePrivileges.empty(); + } + case OBSERVABLE -> {} + case REMOTE_URL -> {} + case UNKNOWN -> {} } - @Override - public boolean setRights(String resourceUrn, ResourcePrivileges resourcePrivileges, Scope scope) { - var status = catalog.get(resourceUrn); - if (status != null) { - status.setPrivileges(resourcePrivileges); - catalog.put(resourceUrn, status); - db.commit(); - return true; - } - return false; - } + return addDependencies(ret, scope); + } - @Override - public URL lockProject(String urn, UserScope scope) { - String token = scope.getIdentity().getId(); - boolean local = - scope instanceof ServiceScope || (scope instanceof ServiceUserScope userScope && userScope.isLocal()); - return workspaceManager.lockProject(urn, token, local); - } + /* + * TODO add dependencies to resource set containing only local resources, + * including merging any remote resources in view of the passed scope. SET TO + * EMPTY if dependencies cannot be resolved in this scope. 
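  // Editor's sketch, not part of this patch: how a caller is expected to consume the ResourceSet
  // produced by resolve() and queryModels() given the contract described in this comment. The
  // loader below is a hypothetical stand-in, and isEmpty() is assumed to be the accessor paired
  // with the setEmpty(...) calls used in addDependencies().
  private void loadResolved(
      String urn, Scope scope, java.util.function.Consumer<ResourceSet.Resource> loader) {
    ResourceSet resolved = resolve(urn, scope);
    if (resolved.isEmpty()) {
      return; // a dependency could not be satisfied in this scope: nothing should be loaded
    }
    // namespaces arrive dependency-first, so they can be loaded front to back
    resolved.getNamespaces().forEach(loader);
  }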
+ */ + private ResourceSet addDependencies(ResourceSet resourceSet, Scope scope) { - @Override - public boolean unlockProject(String urn, UserScope scope) { - String token = scope.getIdentity().getId(); - return workspaceManager.unlockProject(urn, token); + if (resourceSet.getResults().isEmpty()) { + resourceSet.setEmpty(true); + return resourceSet; } - @Override - public ResourceSet resolve(String urn, Scope scope) { - - ResourceSet ret = new ResourceSet(); - - switch (Urn.classify(urn)) { - case RESOURCE -> { - } - case KIM_OBJECT -> { - - /** - * TODO may be a project or even a workspace - */ - - KimNamespace namespace = resolveNamespace(urn, scope); - if (namespace != null) { - - ret.getResults().add( - new ResourceSet.Resource(getUrl().toString(), urn, namespace.getProjectName(), - namespace.getVersion(), KnowledgeClass.NAMESPACE)); - - } else { - - - /* - * extract namespace and check for that. - */ - String ns = Utils.Paths.getLeading(urn, '.'); - String nm = Utils.Paths.getLast(urn, '.'); - namespace = resolveNamespace(ns, scope); - /* - * TODO check permissions! - */ - if (namespace != null) { - for (KlabStatement statement : namespace.getStatements()) { - if (urn.equals(statement.getUrn())) { - ret.getResults().add(new ResourceSet.Resource(getUrl().toString(), urn, - namespace.getProjectName(), - namespace.getVersion(), - KlabAsset.classify(statement))); - break; - } - } - } - } - } - case OBSERVABLE -> { - } - case REMOTE_URL -> { - } - case UNKNOWN -> { - } + Set namespaces = new HashSet<>(); + for (ResourceSet.Resource result : resourceSet.getResults()) { + if (Urn.classify(result.getResourceUrn()) == Urn.Type.KIM_OBJECT) { + if (result.getKnowledgeClass() == KnowledgeClass.NAMESPACE) { + namespaces.add(result.getResourceUrn()); + } else if (result.getKnowledgeClass() == KnowledgeClass.MODEL + || result.getKnowledgeClass() == KnowledgeClass.DEFINITION) { + namespaces.add(Utils.Paths.getLeading(result.getResourceUrn(), '.')); } - - return addDependencies(ret, scope); + } } - /* - * TODO add dependencies to resource set containing only local resources, - * including merging any remote resources in view of the passed scope. SET TO - * EMPTY if dependencies cannot be resolved in this scope. - */ - private ResourceSet addDependencies(ResourceSet resourceSet, Scope scope) { - - if (resourceSet.getResults().isEmpty()) { - resourceSet.setEmpty(true); - return resourceSet; - } - - Set namespaces = new HashSet<>(); - for (ResourceSet.Resource result : resourceSet.getResults()) { - if (Urn.classify(result.getResourceUrn()) == Urn.Type.KIM_OBJECT) { - if (result.getKnowledgeClass() == KnowledgeClass.NAMESPACE) { - namespaces.add(result.getResourceUrn()); - } else if (result.getKnowledgeClass() == KnowledgeClass.MODEL || result.getKnowledgeClass() == KnowledgeClass.DEFINITION) { - namespaces.add(Utils.Paths.getLeading(result.getResourceUrn(), '.')); - } - } - } - - addNamespacesToResultSet(namespaces, resourceSet, scope); + addNamespacesToResultSet(namespaces, resourceSet, scope); - /* - * add components and action libraries to behaviors - * - * add loaded namespaces and the deps (projects, components) of all projects - * that are required by their projects. Function calls may reference local - * resources. - * - * Resources may be using other resources - */ - - return resourceSet; - } - - /** - * Replicate a remote scope in the scope manager. 
This should be called by the runtime service after - * creating it so if the scope has no ID we issue an error, as we do not create independent scopes. + /* + * add components and action libraries to behaviors * - * @param sessionScope a client scope that should record the ID for future communication. If the ID is - * null, the call has failed. - * @return - */ - @Override - public String registerSession(SessionScope sessionScope) { - - if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { - - if (sessionScope.getId() == null) { - throw new KlabIllegalArgumentException( - "resolver: session scope has no ID, cannot register " + "a scope autonomously"); - } - - getScopeManager().registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); - return serviceSessionScope.getId(); - } - - throw new KlabIllegalArgumentException("unexpected scope class"); - } - - /** - * Replicate a remote scope in the scope manager. This should be called by the runtime service after - * creating it so if the scope has no ID we issue an error, as we do not create independent scopes. + * add loaded namespaces and the deps (projects, components) of all projects + * that are required by their projects. Function calls may reference local + * resources. * - * @param contextScope a client scope that should record the ID for future communication. If the ID is - * null, the call has failed. - * @return + * Resources may be using other resources */ - @Override - public String registerContext(ContextScope contextScope) { - - if (contextScope instanceof ServiceContextScope serviceContextScope) { - - if (contextScope.getId() == null) { - throw new KlabIllegalArgumentException( - "resolver: context scope has no ID, cannot register " + "a scope autonomously"); - } - getScopeManager().registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); - return serviceContextScope.getId(); - } - - throw new KlabIllegalArgumentException("unexpected scope class"); + return resourceSet; + } + + /** + * Replicate a remote scope in the scope manager. This should be called by the runtime service + * after creating it so if the scope has no ID we issue an error, as we do not create independent + * scopes. + * + * @param sessionScope a client scope that should record the ID for future communication. If the + * ID is null, the call has failed. + * @return + */ + @Override + public String registerSession(SessionScope sessionScope) { + + if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { + + if (sessionScope.getId() == null) { + throw new KlabIllegalArgumentException( + "resolver: session scope has no ID, cannot register " + "a scope autonomously"); + } + + getScopeManager() + .registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); + return serviceSessionScope.getId(); + } + throw new KlabIllegalArgumentException("unexpected scope class"); + } + + /** + * Replicate a remote scope in the scope manager. This should be called by the runtime service + * after creating it so if the scope has no ID we issue an error, as we do not create independent + * scopes. + * + * @param contextScope a client scope that should record the ID for future communication. If the + * ID is null, the call has failed. 
+ * @return + */ + @Override + public String registerContext(ContextScope contextScope) { + + if (contextScope instanceof ServiceContextScope serviceContextScope) { + + if (contextScope.getId() == null) { + throw new KlabIllegalArgumentException( + "resolver: context scope has no ID, cannot register " + "a scope autonomously"); + } + + getScopeManager() + .registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); + return serviceContextScope.getId(); } + throw new KlabIllegalArgumentException("unexpected scope class"); + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/embedded/ResourcesServiceInstance.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/embedded/ResourcesServiceInstance.java index d1aed82d6..a1cbf8766 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/embedded/ResourcesServiceInstance.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/embedded/ResourcesServiceInstance.java @@ -12,18 +12,19 @@ public class ResourcesServiceInstance extends ServiceInstance { - @Override - protected List getEssentialServices() { - return Collections.emptyList(); - } + @Override + protected List getEssentialServices() { + return Collections.emptyList(); + } - @Override - protected List getOperationalServices() { - return List.of(KlabService.Type.REASONER); - } + @Override + protected List getOperationalServices() { + return List.of(KlabService.Type.REASONER); + } - @Override - protected ResourcesProvider createPrimaryService(AbstractServiceDelegatingScope serviceScope, ServiceStartupOptions options) { - return new ResourcesProvider(serviceScope, options); - } + @Override + protected ResourcesProvider createPrimaryService( + AbstractServiceDelegatingScope serviceScope, ServiceStartupOptions options) { + return new ResourcesProvider(serviceScope, options); + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/DelegatingValidationScope.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/DelegatingValidationScope.java index 45c77ab35..d70ee7ee7 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/DelegatingValidationScope.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/DelegatingValidationScope.java @@ -15,45 +15,50 @@ public abstract class DelegatingValidationScope implements ReasoningValidationScope { - Map descriptorMap = new HashMap<>(); - - @Override - public ConceptDescriptor getConceptDescriptor(String conceptUrn) { - if (!descriptorMap.containsKey(conceptUrn) && resourcesService() != null) { - var descriptor = resourcesService().describeConcept(conceptUrn); - descriptorMap.put(conceptUrn, descriptor == null ? 
null : - new ConceptDescriptor(descriptor.namespace(), - descriptor.conceptName(), - WorldviewValidationScope.getMainType(descriptor.mainDeclaredType()), - descriptor.label(), descriptor.description(), - descriptor.isAbstract(), false)); - } - return descriptorMap.get(conceptUrn); - } + Map descriptorMap = new HashMap<>(); - @Override - public LanguageValidationScope contextualize(EObject context) { - return this; + @Override + public ConceptDescriptor getConceptDescriptor(String conceptUrn) { + if (!descriptorMap.containsKey(conceptUrn) && resourcesService() != null) { + var descriptor = resourcesService().describeConcept(conceptUrn); + descriptorMap.put( + conceptUrn, + descriptor == null + ? null + : new ConceptDescriptor( + descriptor.namespace(), + descriptor.conceptName(), + WorldviewValidationScope.getMainType(descriptor.mainDeclaredType()), + descriptor.label(), + descriptor.description(), + descriptor.isAbstract(), + false)); } + return descriptorMap.get(conceptUrn); + } - @Override - public SemanticSyntax.Type validate(SemanticSyntax concept, List messages) { - if (reasoner() != null) { - // TODO - } - return null; - } + @Override + public LanguageValidationScope contextualize(EObject context) { + return this; + } - @Override - public List validateObservable(ObservableSyntax observable) { - if (reasoner() != null) { - // TODO - } - return Collections.emptyList(); + @Override + public SemanticSyntax.Type validate(SemanticSyntax concept, List messages) { + if (reasoner() != null) { + // TODO } + return null; + } - protected abstract Reasoner reasoner(); + @Override + public List validateObservable(ObservableSyntax observable) { + if (reasoner() != null) { + // TODO + } + return Collections.emptyList(); + } - protected abstract ResourcesService resourcesService(); + protected abstract Reasoner reasoner(); + protected abstract ResourcesService resourcesService(); } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/LanguageAdapter.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/LanguageAdapter.java index 2e23821ee..36489fdb9 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/LanguageAdapter.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/LanguageAdapter.java @@ -24,826 +24,1065 @@ import java.util.*; -/** - * Adapter to substitute the current ones, based on older k.IM grammars. - */ +/** Adapter to substitute the current ones, based on older k.IM grammars. 
*/ public enum LanguageAdapter { + INSTANCE; - INSTANCE; + Map instanceAnnotations = new HashMap<>(); + Map> instanceImplementations = new HashMap<>(); - Map instanceAnnotations = new HashMap<>(); - Map> instanceImplementations = new HashMap<>(); + public boolean registerInstanceClass(Instance annotation, Class annotated) { - public boolean registerInstanceClass(Instance annotation, Class annotated) { - - if (instanceAnnotations.containsKey(annotation.value())) { - return false; - } - - instanceAnnotations.put(annotation.value(), annotation); - instanceImplementations.put(annotation.value(), annotated); + if (instanceAnnotations.containsKey(annotation.value())) { + return false; + } - return true; + instanceAnnotations.put(annotation.value(), annotation); + instanceImplementations.put(annotation.value(), annotated); + + return true; + } + + public KimObservable adaptObservable( + ObservableSyntax observableSyntax, + String namespace, + String projectName, + KlabAsset.KnowledgeClass documentClass) { + + KimObservableImpl ret = new KimObservableImpl(); + + ret.setLength(observableSyntax.getCodeLength()); + ret.setOffsetInDocument(observableSyntax.getCodeOffset()); + ret.setUrn(observableSyntax.encode()); + ret.setNamespace(namespace); + if (observableSyntax.getSemantics().isPattern()) { + ret.setPattern(observableSyntax.getSemantics().encode()); + ret.getPatternVariables().addAll(observableSyntax.getSemantics().getPatternVariables()); + } else { + ret.setSemantics( + adaptSemantics(observableSyntax.getSemantics(), namespace, projectName, documentClass)); + ret.setCodeName( + ret.getSemantics().getType().contains(SemanticType.NOTHING) + ? "invalid_observable" + : observableSyntax.codeName()); + ret.setReferenceName(observableSyntax.referenceName()); + ret.setFormalName(observableSyntax.getStatedName()); } - public KimObservable adaptObservable(ObservableSyntax observableSyntax, String namespace, - String projectName, KlabAsset.KnowledgeClass documentClass) { + ret.setProjectName(projectName); + ret.setDocumentClass(documentClass); - KimObservableImpl ret = new KimObservableImpl(); + // TODO value ops - ret.setLength(observableSyntax.getCodeLength()); - ret.setOffsetInDocument(observableSyntax.getCodeOffset()); - ret.setUrn(observableSyntax.encode()); - ret.setNamespace(namespace); - if (observableSyntax.getSemantics().isPattern()) { - ret.setPattern(observableSyntax.getSemantics().encode()); - ret.getPatternVariables().addAll(observableSyntax.getSemantics().getPatternVariables()); - } else { - ret.setSemantics(adaptSemantics(observableSyntax.getSemantics(), namespace, projectName, - documentClass)); - ret.setCodeName(ret.getSemantics().getType().contains(SemanticType.NOTHING) - ? 
"invalid_observable" - : observableSyntax.codeName()); - ret.setReferenceName(observableSyntax.referenceName()); - ret.setFormalName(observableSyntax.getStatedName()); - } + return ret; + } - ret.setProjectName(projectName); - ret.setDocumentClass(documentClass); + public KimConcept adaptSemantics( + SemanticSyntax semantics, + String namespace, + String projectName, + KlabAsset.KnowledgeClass documentClass) { - // TODO value ops + List tokens = new ArrayList<>(); - return ret; + for (var token : semantics) { + tokens.add(adaptSemanticToken(token, namespace, projectName, documentClass)); } - public KimConcept adaptSemantics(SemanticSyntax semantics, String namespace, String projectName, - KlabAsset.KnowledgeClass documentClass) { - - List tokens = new ArrayList<>(); - - for (var token : semantics) { - tokens.add(adaptSemanticToken(token, namespace, projectName, documentClass)); - } - - if (tokens.isEmpty()) { - return null; - } + if (tokens.isEmpty()) { + return null; + } - // TODO first thing check if there are AND or OR restrictions and behave accordingly + // TODO first thing check if there are AND or OR restrictions and behave accordingly - KimConceptImpl ret = null; - Set type = null; - List roles = new ArrayList<>(); - List traits = new ArrayList<>(); + KimConceptImpl ret = null; + Set type = null; + List roles = new ArrayList<>(); + List traits = new ArrayList<>(); - for (var token : tokens) { + for (var token : tokens) { - if (token.getType().contains(SemanticType.OBSERVABLE)) { - ret = token; - } else if (token.getType().contains(SemanticType.ROLE)) { - roles.add(token); - } else if (token.getType().contains(SemanticType.PREDICATE)) { - traits.add(token); - } - } + if (token.getType().contains(SemanticType.OBSERVABLE)) { + ret = token; + } else if (token.getType().contains(SemanticType.ROLE)) { + roles.add(token); + } else if (token.getType().contains(SemanticType.PREDICATE)) { + traits.add(token); + } + } - if (ret == null) { - // no observable - ret = tokens.getFirst(); - traits.remove(ret); - roles.remove(ret); - } + if (ret == null) { + // no observable + ret = tokens.getFirst(); + traits.remove(ret); + roles.remove(ret); + } - roles.sort(new Comparator() { - @Override - public int compare(KimConcept o1, KimConcept o2) { - return o1.getUrn().compareTo(o2.getUrn()); - } + roles.sort( + new Comparator() { + @Override + public int compare(KimConcept o1, KimConcept o2) { + return o1.getUrn().compareTo(o2.getUrn()); + } }); - traits.sort(new Comparator() { - @Override - public int compare(KimConcept o1, KimConcept o2) { - return o1.getUrn().compareTo(o2.getUrn()); - } + traits.sort( + new Comparator() { + @Override + public int compare(KimConcept o1, KimConcept o2) { + return o1.getUrn().compareTo(o2.getUrn()); + } }); -// -// if (semantics.getUnaryOperator() != null && semantics.getUnaryOperator().getFirst() != null) { -// ret.setSemanticModifier(UnarySemanticOperator.valueOf(semantics.getUnaryOperator().getFirst().name())); -// if (semantics.getUnaryOperator().getSecond() != null) { -// // TODO not sure we have any situation when there is more than one secondary concept -// ret.setComparisonConcept(adaptSemantics(semantics.getUnaryOperator().getSecond().getFirst(), namespace -// , projectName, documentClass)); -// } -// } -// -// for (var restriction : semantics.getRestrictions()) { -// // TODO handle "each" - collective character of the argument, not sure that happens -// var restricting = restriction.getSecond(); -// switch (restriction.getFirst()) { -// case OF -> { -// 
ret.setInherent(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case FOR -> { -// ret.setGoal(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case WITH -> { -// ret.setCompresent(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case ADJACENT -> { -// ret.setAdjacent(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case CAUSING -> { -// ret.setCaused(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case CAUSED_BY -> { -// ret.setCausant(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// case LINKING -> { -// ret.setRelationshipSource(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// ret.setRelationshipTarget(adaptSemantics(restricting.get(1), namespace,projectName, documentClass)); -// } -// case DURING -> { -// ret.setCooccurrent(adaptSemantics(restricting.getFirst(), namespace,projectName, documentClass)); -// } -// default -> throw new KlabInternalErrorException("Unexpected concept restriction with semantic clause " + restriction.getFirst()); -// } -// } - - - // rebuild urn - StringBuilder urn = new StringBuilder(); - - for (var role : roles) { - urn.append(urn.isEmpty() ? "" : " ").append(role.getUrn()); - } - for (var trait : traits) { - urn.append(urn.isEmpty() ? "" : " ").append(trait.getUrn()); + // + // if (semantics.getUnaryOperator() != null && semantics.getUnaryOperator().getFirst() != + // null) { + // + // ret.setSemanticModifier(UnarySemanticOperator.valueOf(semantics.getUnaryOperator().getFirst().name())); + // if (semantics.getUnaryOperator().getSecond() != null) { + // // TODO not sure we have any situation when there is more than one secondary + // concept + // + // ret.setComparisonConcept(adaptSemantics(semantics.getUnaryOperator().getSecond().getFirst(), + // namespace + // , projectName, documentClass)); + // } + // } + // + // for (var restriction : semantics.getRestrictions()) { + // // TODO handle "each" - collective character of the argument, not sure that + // happens + // var restricting = restriction.getSecond(); + // switch (restriction.getFirst()) { + // case OF -> { + // ret.setInherent(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // case FOR -> { + // ret.setGoal(adaptSemantics(restricting.getFirst(), namespace,projectName, + // documentClass)); + // } + // case WITH -> { + // ret.setCompresent(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // case ADJACENT -> { + // ret.setAdjacent(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // case CAUSING -> { + // ret.setCaused(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // case CAUSED_BY -> { + // ret.setCausant(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // case LINKING -> { + // ret.setRelationshipSource(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // ret.setRelationshipTarget(adaptSemantics(restricting.get(1), + // namespace,projectName, documentClass)); + // } + // case DURING -> { + // ret.setCooccurrent(adaptSemantics(restricting.getFirst(), + // namespace,projectName, documentClass)); + // } + // default -> throw new KlabInternalErrorException("Unexpected concept + 
// restriction with semantic clause " + restriction.getFirst()); + // } + // } + + // rebuild urn + StringBuilder urn = new StringBuilder(); + + for (var role : roles) { + urn.append(urn.isEmpty() ? "" : " ").append(role.getUrn()); + } + for (var trait : traits) { + urn.append(urn.isEmpty() ? "" : " ").append(trait.getUrn()); + } + urn.append(urn.isEmpty() ? "" : " ").append(ret.getUrn()); + + ret.setUrn(urn.toString()); + ret.getTraits().addAll(traits); + ret.getRoles().addAll(roles); + + return ret; + } + + public KimConceptImpl adaptSemanticToken( + SemanticSyntax semantics, + String namespace, + String projectName, + KlabAsset.KnowledgeClass documentClass) { + + KimConceptImpl ret = new KimConceptImpl(); + + ret.setLength(semantics.getCodeLength()); + ret.setOffsetInDocument(semantics.getCodeOffset()); + ret.setType(adaptSemanticType(semantics.getType())); + ret.setNegated(semantics.isNegated()); + ret.setCollective(semantics.isCollective()); + ret.setCodeName(semantics.codeName()); + ret.setDeprecation(semantics.getDeprecation()); + ret.setDeprecated(semantics.getDeprecation() != null); + ret.setNamespace(namespace); + ret.setProjectName(projectName); + ret.setDocumentClass(documentClass); + ret.setPattern(semantics.isPattern()); + ret.getPatternVariables().addAll(semantics.getPatternVariables()); + + if (semantics.isLeafDeclaration()) { + ret.setName(semantics.encode()); + } else { + if (semantics.getType().is(SemanticSyntax.TypeCategory.VALID)) { + ret.setObservable(adaptSemantics(semantics.getObservable(), documentClass)); + } else { + ret.setObservable(KimConceptImpl.nothing()); + ret.setCodeName("invalid_concept"); + } + for (var cr : semantics.getConceptReferences()) { + var trait = adaptSemantics(cr, documentClass); + if (trait.is(SemanticType.ROLE)) { + ret.getRoles().add(trait); + } else if (trait.is(SemanticType.TRAIT)) { + ret.getTraits().add(trait); } - urn.append(urn.isEmpty() ? 
"" : " ").append(ret.getUrn()); - - ret.setUrn(urn.toString()); - ret.getTraits().addAll(traits); - ret.getRoles().addAll(roles); + } + } - return ret; + if (semantics.getUnaryOperator() != null && semantics.getUnaryOperator().getFirst() != null) { + ret.setSemanticModifier( + UnarySemanticOperator.valueOf(semantics.getUnaryOperator().getFirst().name())); + if (semantics.getUnaryOperator().getSecond() != null) { + // TODO not sure we have any situation when there is more than one secondary concept + ret.setComparisonConcept( + adaptSemantics( + semantics.getUnaryOperator().getSecond().getFirst(), + namespace, + projectName, + documentClass)); + } } - public KimConceptImpl adaptSemanticToken(SemanticSyntax semantics, String namespace, String projectName, - KlabAsset.KnowledgeClass documentClass) { - - KimConceptImpl ret = new KimConceptImpl(); - - ret.setLength(semantics.getCodeLength()); - ret.setOffsetInDocument(semantics.getCodeOffset()); - ret.setType(adaptSemanticType(semantics.getType())); - ret.setNegated(semantics.isNegated()); - ret.setCollective(semantics.isCollective()); - ret.setCodeName(semantics.codeName()); - ret.setDeprecation(semantics.getDeprecation()); - ret.setDeprecated(semantics.getDeprecation() != null); - ret.setNamespace(namespace); - ret.setProjectName(projectName); - ret.setDocumentClass(documentClass); - ret.setPattern(semantics.isPattern()); - ret.getPatternVariables().addAll(semantics.getPatternVariables()); - - if (semantics.isLeafDeclaration()) { - ret.setName(semantics.encode()); - } else { - if (semantics.getType().is(SemanticSyntax.TypeCategory.VALID)) { - ret.setObservable(adaptSemantics(semantics.getObservable(), documentClass)); - } else { - ret.setObservable(KimConceptImpl.nothing()); - ret.setCodeName("invalid_concept"); - } - for (var cr : semantics.getConceptReferences()) { - var trait = adaptSemantics(cr, documentClass); - if (trait.is(SemanticType.ROLE)) { - ret.getRoles().add(trait); - } else if (trait.is(SemanticType.TRAIT)) { - ret.getTraits().add(trait); - } - } + for (var restriction : semantics.getRestrictions()) { + switch (restriction.getFirst()) { + case OF -> { + ret.setInherent( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); } - - if (semantics.getUnaryOperator() != null && semantics.getUnaryOperator().getFirst() != null) { - ret.setSemanticModifier(UnarySemanticOperator.valueOf(semantics.getUnaryOperator().getFirst().name())); - if (semantics.getUnaryOperator().getSecond() != null) { - // TODO not sure we have any situation when there is more than one secondary concept - ret.setComparisonConcept(adaptSemantics(semantics.getUnaryOperator().getSecond().getFirst(), namespace - , projectName, documentClass)); - } + case FOR -> { + ret.setGoal( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); } - - - for (var restriction : semantics.getRestrictions()) { - switch (restriction.getFirst()) { - case OF -> { - ret.setInherent(adaptSemantics(restriction.getSecond().getFirst(), namespace, - projectName, documentClass)); - } - case FOR -> { - ret.setGoal(adaptSemantics(restriction.getSecond().getFirst(), namespace, projectName, - documentClass)); - } - case WITH -> { - ret.setCompresent(adaptSemantics(restriction.getSecond().getFirst(), namespace, - projectName, documentClass)); - } - case ADJACENT -> { - ret.setAdjacent(adaptSemantics(restriction.getSecond().getFirst(), namespace, - projectName, documentClass)); - } - case OR -> { - } - case AND -> { 
- } - case CAUSING -> { - ret.setCaused(adaptSemantics(restriction.getSecond().getFirst(), namespace, projectName - , documentClass)); - } - case CAUSED_BY -> { - ret.setCausant(adaptSemantics(restriction.getSecond().getFirst(), namespace, - projectName, documentClass)); - } - case LINKING -> { - ret.setRelationshipSource(adaptSemantics(restriction.getSecond().get(0), namespace, - projectName, documentClass)); - ret.setRelationshipTarget(adaptSemantics(restriction.getSecond().get(1), namespace, - projectName, documentClass)); - } - case CONTAINING -> { - // TODO - throw new IllegalStateException("no syntax for containment"); - } - case CONTAINED_IN -> { - // TODO - throw new IllegalStateException("no syntax for containment"); - } - case DURING -> { // TODO missing DURING_EACH - but is it necessary? - ret.setCooccurrent(adaptSemantics(restriction.getSecond().get(0), namespace, - projectName, documentClass)); - } - } + case WITH -> { + ret.setCompresent( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); } - - // TODO establish abstract and generic nature - - ret.setUrn(ret.computeUrn()); - - return ret; - } - - public KimNamespace adaptNamespace(NamespaceSyntax namespace, String projectName, - Collection notifications) { - - var ret = new KimNamespaceImpl(); - ret.setUrn(namespace.getUrn()); - ret.setScenario(namespace.isScenario()); - ret.setSourceCode(namespace.getSourceCode()); - ret.setProjectName(projectName); - - // TODO ret.setImports(); and the rest - for (var statement : namespace.getStatements()) { - ret.getStatements().add(adaptStatement(statement, ret)); + case ADJACENT -> { + ret.setAdjacent( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); } - - return ret; + case OR -> {} + case AND -> {} + case CAUSING -> { + ret.setCaused( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); + } + case CAUSED_BY -> { + ret.setCausant( + adaptSemantics( + restriction.getSecond().getFirst(), namespace, projectName, documentClass)); + } + case LINKING -> { + ret.setRelationshipSource( + adaptSemantics( + restriction.getSecond().get(0), namespace, projectName, documentClass)); + ret.setRelationshipTarget( + adaptSemantics( + restriction.getSecond().get(1), namespace, projectName, documentClass)); + } + case CONTAINING -> { + // TODO + throw new IllegalStateException("no syntax for containment"); + } + case CONTAINED_IN -> { + // TODO + throw new IllegalStateException("no syntax for containment"); + } + case DURING -> { // TODO missing DURING_EACH - but is it necessary? + ret.setCooccurrent( + adaptSemantics( + restriction.getSecond().get(0), namespace, projectName, documentClass)); + } + } } - private KlabStatement adaptStatement(NamespaceStatementSyntax statement, KimNamespace namespace) { - return switch (statement) { - // case InstanceSyntax instance -> adaptInstance(instance, namespace); - case ModelSyntax model -> adaptModel(model, namespace); - case DefineSyntax define -> adaptDefine(define, namespace); - default -> null; - }; - } + // TODO establish abstract and generic nature - private KlabStatement adaptDefine(DefineSyntax define, KimNamespace namespace) { - KimSymbolDefinitionImpl ret = new KimSymbolDefinitionImpl(); - ret.setDeprecated(define.getDeprecation() != null); - ret.setDefineClass(define.getInstanceClass()); - ret.setUrn(namespace.getUrn() + "." 
+ define.getName()); - ret.setOffsetInDocument(define.getCodeOffset()); - ret.setName(define.getName()); - ret.setLength(define.getCodeLength()); - ret.setNamespace(namespace.getUrn()); - ret.setProjectName(namespace.getProjectName()); - ret.setDefaulted(define.isDefaulted()); - ret.setDocumentClass(KlabAsset.KnowledgeClass.NAMESPACE); - ret.setValue(adaptValue(define.getValue(), namespace.getUrn(), namespace.getProjectName(), - KlabAsset.KnowledgeClass.NAMESPACE)); - return ret; - } + ret.setUrn(ret.computeUrn()); - /** - * Adapt any value that can be part of a literal, recursively unparsing its contents. We only keep the - * syntactic info for the top-level object. - * - * @param value - * @return - */ - private Object adaptValue(Object value, String namespace, String projectName, - KlabAsset.KnowledgeClass documentClass) { + return ret; + } - if (value == null) { - return null; - } + public KimNamespace adaptNamespace( + NamespaceSyntax namespace, String projectName, Collection notifications) { - Object object = value; - if (object instanceof ParsedLiteral parsedLiteral) { - if (parsedLiteral.isIdentifier()) { - return Identifier.create(parsedLiteral.getPod().toString()); - } - if (parsedLiteral.getCurrency() != null || parsedLiteral.getUnit() != null) { - QuantityImpl ret = new QuantityImpl(); - ret.setCurrency(parsedLiteral.getCurrency()); - ret.setUnit(parsedLiteral.getUnit()); - ret.setValue(parsedLiteral.getPod() instanceof Number number ? number : 0); - return ret; - } - object = adaptValue(parsedLiteral.getPod(), namespace, projectName, documentClass); - if (object == null) { - return null; - } - } /*else if (object instanceof Literal literal) { - object = literal.get(Object.class); - } */ else if (object instanceof ObservableSyntax observableSyntax) { - object = adaptObservable(observableSyntax, namespace, projectName, documentClass); - } else if (object instanceof SemanticSyntax semanticSyntax) { - object = adaptSemantics(semanticSyntax, namespace, projectName, documentClass); - } + var ret = new KimNamespaceImpl(); + ret.setUrn(namespace.getUrn()); + ret.setScenario(namespace.isScenario()); + ret.setSourceCode(namespace.getSourceCode()); + ret.setProjectName(projectName); - return switch (object) { - case Map map -> { - var ret = new LinkedHashMap(); - for (Object key : map.keySet()) { - ret.put(key, adaptValue(map.get(key), namespace, projectName, documentClass)); - } - yield ret; - } - case Collection collection -> { - var ret = new ArrayList<>(); - for (Object item : collection) { - ret.add(adaptValue(item, namespace, projectName, documentClass)); - } - yield ret; - } - case ObservableSyntax observableSyntax -> { - yield adaptObservable(observableSyntax, namespace, projectName, documentClass); - } - case RangeLiteral rangeLiteral -> { - var range = new NumericRangeImpl(); - range.setLowerBound(rangeLiteral.getFrom().doubleValue()); - range.setUpperBound(rangeLiteral.getTo().doubleValue()); - range.setLowerExclusive(!rangeLiteral.isLeftInclusive()); - range.setUpperOpen(!rangeLiteral.isRightInclusive()); - yield range; - } - default -> { - yield object; - } - }; + // TODO ret.setImports(); and the rest + for (var statement : namespace.getStatements()) { + ret.getStatements().add(adaptStatement(statement, ret)); } + return ret; + } + + private KlabStatement adaptStatement(NamespaceStatementSyntax statement, KimNamespace namespace) { + return switch (statement) { + // case InstanceSyntax instance -> adaptInstance(instance, namespace); + case ModelSyntax model -> 
adaptModel(model, namespace); + case DefineSyntax define -> adaptDefine(define, namespace); + default -> null; + }; + } + + private KlabStatement adaptDefine(DefineSyntax define, KimNamespace namespace) { + KimSymbolDefinitionImpl ret = new KimSymbolDefinitionImpl(); + ret.setDeprecated(define.getDeprecation() != null); + ret.setDefineClass(define.getInstanceClass()); + ret.setUrn(namespace.getUrn() + "." + define.getName()); + ret.setOffsetInDocument(define.getCodeOffset()); + ret.setName(define.getName()); + ret.setLength(define.getCodeLength()); + ret.setNamespace(namespace.getUrn()); + ret.setProjectName(namespace.getProjectName()); + ret.setDefaulted(define.isDefaulted()); + ret.setDocumentClass(KlabAsset.KnowledgeClass.NAMESPACE); + ret.setValue( + adaptValue( + define.getValue(), + namespace.getUrn(), + namespace.getProjectName(), + KlabAsset.KnowledgeClass.NAMESPACE)); + return ret; + } + + /** + * Adapt any value that can be part of a literal, recursively unparsing its contents. We only keep + * the syntactic info for the top-level object. + * + * @param value + * @return + */ + private Object adaptValue( + Object value, String namespace, String projectName, KlabAsset.KnowledgeClass documentClass) { + + if (value == null) { + return null; + } - private Notification.LexicalContext asLexicalContext(ParsedObject object) { - // TODO + Object object = value; + if (object instanceof ParsedLiteral parsedLiteral) { + if (parsedLiteral.isIdentifier()) { + return Identifier.create(parsedLiteral.getPod().toString()); + } + if (parsedLiteral.getCurrency() != null || parsedLiteral.getUnit() != null) { + QuantityImpl ret = new QuantityImpl(); + ret.setCurrency(parsedLiteral.getCurrency()); + ret.setUnit(parsedLiteral.getUnit()); + ret.setValue(parsedLiteral.getPod() instanceof Number number ? number : 0); + return ret; + } + object = adaptValue(parsedLiteral.getPod(), namespace, projectName, documentClass); + if (object == null) { return null; + } + } /*else if (object instanceof Literal literal) { + object = literal.get(Object.class); + } */ else if (object instanceof ObservableSyntax observableSyntax) { + object = adaptObservable(observableSyntax, namespace, projectName, documentClass); + } else if (object instanceof SemanticSyntax semanticSyntax) { + object = adaptSemantics(semanticSyntax, namespace, projectName, documentClass); } - private KlabStatement adaptModel(ModelSyntax model, KimNamespace namespace) { - - KimModelImpl ret = new KimModelImpl(); - - ret.setNamespace(namespace.getUrn()); - ret.setDeprecated(model.getDeprecation() != null); - ret.setDeprecation(model.getDeprecation()); - ret.setUrn(namespace.getUrn() + "." 
+ model.getName()); - // ret.setName(model.getName()); - ret.setOffsetInDocument(model.getCodeOffset()); - ret.setLength(model.getCodeLength()); - ret.setProjectName(namespace.getProjectName()); - ret.setDocumentClass(KlabAsset.KnowledgeClass.NAMESPACE); - - // TODO docstring set through next-gen literate programming features - - boolean inactive = false; - for (var observable : model.getObservables()) { - var obs = adaptObservable(observable, namespace.getUrn(), namespace.getProjectName(), - KlabAsset.KnowledgeClass.NAMESPACE); - ret.getObservables().add(obs); - if (obs.getSemantics().is(SemanticType.NOTHING)) { - inactive = true; - } - } - for (var dependency : model.getDependencies()) { - var obs = adaptObservable(dependency, namespace.getUrn(), namespace.getProjectName(), - KlabAsset.KnowledgeClass.NAMESPACE); - ret.getDependencies().add(obs); - if (obs.getSemantics().is(SemanticType.NOTHING)) { - inactive = true; - } + return switch (object) { + case Map map -> { + var ret = new LinkedHashMap(); + for (Object key : map.keySet()) { + ret.put(key, adaptValue(map.get(key), namespace, projectName, documentClass)); } - - ret.setInactive(inactive); - - for (var contextualizable : model.getContextualizations()) { - ret.getContextualization().add(adaptContextualizable(contextualizable, namespace)); + yield ret; + } + case Collection collection -> { + var ret = new ArrayList<>(); + for (Object item : collection) { + ret.add(adaptValue(item, namespace, projectName, documentClass)); } - - return ret; + yield ret; + } + case ObservableSyntax observableSyntax -> { + yield adaptObservable(observableSyntax, namespace, projectName, documentClass); + } + case RangeLiteral rangeLiteral -> { + var range = new NumericRangeImpl(); + range.setLowerBound(rangeLiteral.getFrom().doubleValue()); + range.setUpperBound(rangeLiteral.getTo().doubleValue()); + range.setLowerExclusive(!rangeLiteral.isLeftInclusive()); + range.setUpperOpen(!rangeLiteral.isRightInclusive()); + yield range; + } + default -> { + yield object; + } + }; + } + + private Notification.LexicalContext asLexicalContext(ParsedObject object) { + // TODO + return null; + } + + private KlabStatement adaptModel(ModelSyntax model, KimNamespace namespace) { + + KimModelImpl ret = new KimModelImpl(); + + ret.setNamespace(namespace.getUrn()); + ret.setDeprecated(model.getDeprecation() != null); + ret.setDeprecation(model.getDeprecation()); + ret.setUrn(namespace.getUrn() + "." 
+ model.getName()); + // ret.setName(model.getName()); + ret.setOffsetInDocument(model.getCodeOffset()); + ret.setLength(model.getCodeLength()); + ret.setProjectName(namespace.getProjectName()); + ret.setDocumentClass(KlabAsset.KnowledgeClass.NAMESPACE); + + // TODO docstring set through next-gen literate programming features + + boolean inactive = false; + for (var observable : model.getObservables()) { + var obs = + adaptObservable( + observable, + namespace.getUrn(), + namespace.getProjectName(), + KlabAsset.KnowledgeClass.NAMESPACE); + ret.getObservables().add(obs); + if (obs.getSemantics().is(SemanticType.NOTHING)) { + inactive = true; + } + } + for (var dependency : model.getDependencies()) { + var obs = + adaptObservable( + dependency, + namespace.getUrn(), + namespace.getProjectName(), + KlabAsset.KnowledgeClass.NAMESPACE); + ret.getDependencies().add(obs); + if (obs.getSemantics().is(SemanticType.NOTHING)) { + inactive = true; + } } - private Contextualizable adaptContextualizable(ModelSyntax.Contextualization contextualizable, - KimNamespace namespace) { - - var ret = new ContextualizableImpl(); - - ret.setOffsetInDocument(contextualizable.getCodeOffset()); - ret.setLength(contextualizable.getCodeLength()); - ret.setNamespace(namespace.getUrn()); - - if (contextualizable.getContextualizable() instanceof FunctionCallSyntax functionCallSyntax) { - ret.setServiceCall(adaptServiceCall(functionCallSyntax, namespace.getUrn(), - namespace.getProjectName(), KlabAsset.KnowledgeClass.MODEL)); - } else if (contextualizable.getContextualizable() instanceof ExpressionSyntax expressionSyntax) { - ret.setExpression(adaptExpression(expressionSyntax, namespace)); - } else { - // TODO all others - throw new KlabUnimplementedException("contextualizable " + contextualizable); - } + ret.setInactive(inactive); - return ret; + for (var contextualizable : model.getContextualizations()) { + ret.getContextualization().add(adaptContextualizable(contextualizable, namespace)); } - private ExpressionCode adaptExpression(ExpressionSyntax expressionSyntax, KimNamespace namespace) { - var ret = new ExpressionCodeImpl(); - ret.setCode(expressionSyntax.getCode()); - ret.setForcedScalar(expressionSyntax.isScalar()); - ret.setLanguage(expressionSyntax.getLanguage()); - return ret; - } - // - // private KlabStatement adaptInstance(InstanceSyntax instance, KimNamespace namespace) { - // return null; - // } - - private KimConcept adaptSemantics(SemanticSyntax.ConceptData observable, - KlabAsset.KnowledgeClass documentClass) { - KimConceptImpl ret = new KimConceptImpl(); - ret.setUrn(observable.concept().namespace() + ":" + observable.concept().conceptName()); - ret.setName(ret.getUrn()); - ret.setType(adaptSemanticType(observable.concept().mainType())); - ret.setDocumentClass(documentClass); - ret.computeUrn(); - return ret; + return ret; + } + + private Contextualizable adaptContextualizable( + ModelSyntax.Contextualization contextualizable, KimNamespace namespace) { + + var ret = new ContextualizableImpl(); + + ret.setOffsetInDocument(contextualizable.getCodeOffset()); + ret.setLength(contextualizable.getCodeLength()); + ret.setNamespace(namespace.getUrn()); + + if (contextualizable.getContextualizable() instanceof FunctionCallSyntax functionCallSyntax) { + ret.setServiceCall( + adaptServiceCall( + functionCallSyntax, + namespace.getUrn(), + namespace.getProjectName(), + KlabAsset.KnowledgeClass.MODEL)); + } else if (contextualizable.getContextualizable() + instanceof ExpressionSyntax expressionSyntax) { + 
ret.setExpression(adaptExpression(expressionSyntax, namespace)); + } else { + // TODO all others + throw new KlabUnimplementedException("contextualizable " + contextualizable); } - private Set adaptSemanticType(SemanticSyntax.Type type) { - var ret = switch (type) { - case VOID, NOTHING -> EnumSet.of(SemanticType.NOTHING); - case ACCELERATION -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.ACCELERATION); - case AMOUNT -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.AMOUNT); - case ANGLE -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.ANGLE); - case AREA -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.AREA); - case ATTRIBUTE -> EnumSet.of(SemanticType.PREDICATE, SemanticType.ATTRIBUTE, SemanticType.TRAIT); - case BOND -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, - SemanticType.DIRECT_OBSERVABLE, SemanticType.RELATIONSHIP, - SemanticType.BIDIRECTIONAL); - case CHARGE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.CHARGE); - case CLASS -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY, SemanticType.CLASS); - case CONFIGURATION -> EnumSet.of(SemanticType.DIRECT_OBSERVABLE, SemanticType.CONFIGURATION); - case DOMAIN -> EnumSet.of(SemanticType.PREDICATE, SemanticType.DOMAIN); - case DURATION -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.DURATION); - case ELECTRIC_POTENTIAL -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.ELECTRIC_POTENTIAL); - case ENERGY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.ENERGY); - case ENTROPY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.ENTROPY); - case EVENT -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, SemanticType.EVENT); - case EXTENT -> EnumSet.of(SemanticType.EXTENT, SemanticType.QUALITY); - case FUNCTIONAL_RELATIONSHIP -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, - SemanticType.DIRECT_OBSERVABLE, SemanticType.RELATIONSHIP, - SemanticType.FUNCTIONAL); - case GENERIC_QUALITY -> - // this only happens with core im:Quality. It's deprecated and should not get here. - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY); - case IDENTITY -> EnumSet.of(SemanticType.PREDICATE, SemanticType.IDENTITY, SemanticType.TRAIT); - case LENGTH -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.LENGTH); - case MASS -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.MASS); - case MONEY -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.MONEY); - case ORDERING -> - EnumSet.of(SemanticType.PREDICATE, SemanticType.ORDERING, SemanticType.TRAIT); // TODO - // attribute? 
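Most arms of the adaptSemanticType switch shown here expand a declared type into the same {OBSERVABLE, QUANTIFIABLE, QUALITY} base set plus one specific constant. A small standalone sketch of that expansion pattern, using a stand-in enum instead of the real SemanticType:

import java.util.EnumSet;
import java.util.Set;

// Stand-in enum with only the flags needed to show the pattern; the real
// SemanticType enum has many more constants.
public class TypeMappingSketch {

  enum Flag { OBSERVABLE, QUANTIFIABLE, QUALITY, LENGTH, MASS }

  // Every declarable physical quantity expands to the three base flags
  // plus its specific constant, mirroring the switch arms shown here.
  static Set<Flag> quality(Flag specific) {
    var ret = EnumSet.of(Flag.OBSERVABLE, Flag.QUANTIFIABLE, Flag.QUALITY);
    ret.add(specific);
    return ret;
  }

  public static void main(String[] args) {
    System.out.println(quality(Flag.LENGTH)); // [OBSERVABLE, QUANTIFIABLE, QUALITY, LENGTH]
  }
}

The real adaptSemanticType additionally adds INTENSIVE or EXTENSIVE depending on the type category, as the code after the switch shows.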
- case PRESSURE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.PRESSURE); - case PRIORITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.PRIORITY); - case PROCESS -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.PROCESS); - case QUANTITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.QUANTITY); - case AGENT -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, - SemanticType.DIRECT_OBSERVABLE, SemanticType.AGENT); - case REALM -> EnumSet.of(SemanticType.PREDICATE, SemanticType.ATTRIBUTE, SemanticType.TRAIT); - case RESISTANCE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.RESISTANCE); - case RESISTIVITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.RESISTIVITY); - case ROLE -> EnumSet.of(SemanticType.PREDICATE, SemanticType.ROLE); - case STRUCTURAL_RELATIONSHIP -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, - SemanticType.DIRECT_OBSERVABLE, SemanticType.RELATIONSHIP, - SemanticType.STRUCTURAL); - case SUBJECT -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, - SemanticType.DIRECT_OBSERVABLE, SemanticType.SUBJECT); - case TEMPERATURE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.TEMPERATURE); - case VELOCITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.VELOCITY); - case VISCOSITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.VISCOSITY); - case MONETARY_VALUE -> null; - case VOLUME -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.VOLUME); - case WEIGHT -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.WEIGHT); - case PROBABILITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.PROBABILITY); - case OCCURRENCE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.OCCURRENCE); - case PERCENTAGE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.PERCENTAGE); - case RATIO -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.RATIO); - case UNCERTAINTY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.UNCERTAINTY); - case VALUE -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.VALUE); - case PROPORTION -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.PROPORTION); - case RATE -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.RATE); - case PRESENCE -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY, SemanticType.PRESENCE); - case MAGNITUDE -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.MAGNITUDE); - case NUMEROSITY -> - EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUANTIFIABLE, SemanticType.QUALITY, - SemanticType.NUMEROSITY); + return ret; + } + + private ExpressionCode adaptExpression( + 
ExpressionSyntax expressionSyntax, KimNamespace namespace) { + var ret = new ExpressionCodeImpl(); + ret.setCode(expressionSyntax.getCode()); + ret.setForcedScalar(expressionSyntax.isScalar()); + ret.setLanguage(expressionSyntax.getLanguage()); + return ret; + } + + // + // private KlabStatement adaptInstance(InstanceSyntax instance, KimNamespace namespace) { + // return null; + // } + + private KimConcept adaptSemantics( + SemanticSyntax.ConceptData observable, KlabAsset.KnowledgeClass documentClass) { + KimConceptImpl ret = new KimConceptImpl(); + ret.setUrn(observable.concept().namespace() + ":" + observable.concept().conceptName()); + ret.setName(ret.getUrn()); + ret.setType(adaptSemanticType(observable.concept().mainType())); + ret.setDocumentClass(documentClass); + ret.computeUrn(); + return ret; + } + + private Set adaptSemanticType(SemanticSyntax.Type type) { + var ret = + switch (type) { + case VOID, NOTHING -> EnumSet.of(SemanticType.NOTHING); + case ACCELERATION -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.ACCELERATION); + case AMOUNT -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.AMOUNT); + case ANGLE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.ANGLE); + case AREA -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.AREA); + case ATTRIBUTE -> + EnumSet.of(SemanticType.PREDICATE, SemanticType.ATTRIBUTE, SemanticType.TRAIT); + case BOND -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.COUNTABLE, + SemanticType.DIRECT_OBSERVABLE, + SemanticType.RELATIONSHIP, + SemanticType.BIDIRECTIONAL); + case CHARGE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.CHARGE); + case CLASS -> + EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY, SemanticType.CLASS); + case CONFIGURATION -> + EnumSet.of(SemanticType.DIRECT_OBSERVABLE, SemanticType.CONFIGURATION); + case DOMAIN -> EnumSet.of(SemanticType.PREDICATE, SemanticType.DOMAIN); + case DURATION -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.DURATION); + case ELECTRIC_POTENTIAL -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.ELECTRIC_POTENTIAL); + case ENERGY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.ENERGY); + case ENTROPY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.ENTROPY); + case EVENT -> + EnumSet.of(SemanticType.OBSERVABLE, SemanticType.COUNTABLE, SemanticType.EVENT); + case EXTENT -> EnumSet.of(SemanticType.EXTENT, SemanticType.QUALITY); + case FUNCTIONAL_RELATIONSHIP -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.COUNTABLE, + SemanticType.DIRECT_OBSERVABLE, + SemanticType.RELATIONSHIP, + SemanticType.FUNCTIONAL); + case GENERIC_QUALITY -> + // this only happens with core im:Quality. It's deprecated and should not get here. 
+ EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY); + case IDENTITY -> + EnumSet.of(SemanticType.PREDICATE, SemanticType.IDENTITY, SemanticType.TRAIT); + case LENGTH -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.LENGTH); + case MASS -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.MASS); + case MONEY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.MONEY); + case ORDERING -> + EnumSet.of(SemanticType.PREDICATE, SemanticType.ORDERING, SemanticType.TRAIT); // TODO + // attribute? + case PRESSURE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.PRESSURE); + case PRIORITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.PRIORITY); + case PROCESS -> EnumSet.of(SemanticType.OBSERVABLE, SemanticType.PROCESS); + case QUANTITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.QUANTITY); + case AGENT -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.COUNTABLE, + SemanticType.DIRECT_OBSERVABLE, + SemanticType.AGENT); + case REALM -> + EnumSet.of(SemanticType.PREDICATE, SemanticType.ATTRIBUTE, SemanticType.TRAIT); + case RESISTANCE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.RESISTANCE); + case RESISTIVITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.RESISTIVITY); + case ROLE -> EnumSet.of(SemanticType.PREDICATE, SemanticType.ROLE); + case STRUCTURAL_RELATIONSHIP -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.COUNTABLE, + SemanticType.DIRECT_OBSERVABLE, + SemanticType.RELATIONSHIP, + SemanticType.STRUCTURAL); + case SUBJECT -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.COUNTABLE, + SemanticType.DIRECT_OBSERVABLE, + SemanticType.SUBJECT); + case TEMPERATURE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.TEMPERATURE); + case VELOCITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.VELOCITY); + case VISCOSITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.VISCOSITY); + case MONETARY_VALUE -> null; + case VOLUME -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.VOLUME); + case WEIGHT -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.WEIGHT); + case PROBABILITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.PROBABILITY); + case OCCURRENCE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.OCCURRENCE); + case PERCENTAGE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.PERCENTAGE); + case RATIO -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.RATIO); + case UNCERTAINTY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.UNCERTAINTY); + case 
VALUE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.VALUE); + case PROPORTION -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.PROPORTION); + case RATE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.RATE); + case PRESENCE -> + EnumSet.of(SemanticType.OBSERVABLE, SemanticType.QUALITY, SemanticType.PRESENCE); + case MAGNITUDE -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.MAGNITUDE); + case NUMEROSITY -> + EnumSet.of( + SemanticType.OBSERVABLE, + SemanticType.QUANTIFIABLE, + SemanticType.QUALITY, + SemanticType.NUMEROSITY); }; - // single source of truth for intensive/extensive nature - if (type.is(SemanticSyntax.TypeCategory.INTENSIVE)) { - ret.add(SemanticType.INTENSIVE); - } else if (type.is(SemanticSyntax.TypeCategory.EXTENSIVE)) { - ret.add(SemanticType.EXTENSIVE); - } - return ret; + // single source of truth for intensive/extensive nature + if (type.is(SemanticSyntax.TypeCategory.INTENSIVE)) { + ret.add(SemanticType.INTENSIVE); + } else if (type.is(SemanticSyntax.TypeCategory.EXTENSIVE)) { + ret.add(SemanticType.EXTENSIVE); } - - public KimObservationStrategyDocument adaptStrategies(ObservationStrategiesSyntax definition, - String projectName, - Collection notifications) { - - KimObservationStrategiesImpl ret = new KimObservationStrategiesImpl(); - ret.setUrn(definition.getUrn()); - ret.getNotifications().addAll(notifications); - ret.setSourceCode(definition.getSourceCode()); - ret.setProjectName(projectName); - - // we don't add source code here as each strategy has its own - for (var strategy : definition.getStrategies()) { - ret.getStatements().add(adaptStrategy(strategy, definition.getUrn(), projectName)); - } - return ret; + return ret; + } + + public KimObservationStrategyDocument adaptStrategies( + ObservationStrategiesSyntax definition, + String projectName, + Collection notifications) { + + KimObservationStrategiesImpl ret = new KimObservationStrategiesImpl(); + ret.setUrn(definition.getUrn()); + ret.getNotifications().addAll(notifications); + ret.setSourceCode(definition.getSourceCode()); + ret.setProjectName(projectName); + + // we don't add source code here as each strategy has its own + for (var strategy : definition.getStrategies()) { + ret.getStatements().add(adaptStrategy(strategy, definition.getUrn(), projectName)); } - - private ServiceCall adaptServiceCall(FunctionCallSyntax functionCallSyntax, String namespace, - String projectName, KlabAsset.KnowledgeClass documentClass) { - - ServiceCallImpl ret = new ServiceCallImpl(); - ret.setLength(functionCallSyntax.getCodeLength()); - ret.setOffsetInDocument(functionCallSyntax.getCodeOffset()); - ret.setNamespace(namespace); - ret.setProjectName(projectName); - ret.setUrn(functionCallSyntax.getName()); - ret.setSourceCode(functionCallSyntax.encode()); - - for (String key : functionCallSyntax.getArguments().keySet()) { - ret.getParameters().put(key, adaptValue(functionCallSyntax.getArguments().get(key), namespace, - projectName, documentClass)); - } - - // TODO unnamed parameters, annotations and all that - - return ret; + return ret; + } + + private ServiceCall adaptServiceCall( + FunctionCallSyntax functionCallSyntax, + String namespace, + String projectName, + KlabAsset.KnowledgeClass documentClass) { + + ServiceCallImpl ret = new ServiceCallImpl(); + 
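In adaptServiceCall below, each named argument is passed through adaptValue, which descends recursively into maps and collections and leaves plain values untouched. A self-contained sketch of that descent with plain JDK types only (the real method also handles parsed literals, numeric ranges, observables and quantities from the k.LAB syntax API):

import java.util.*;

// Plain-JDK sketch of the recursive descent used when adapting argument values:
// maps and collections are rebuilt element by element, scalars pass through.
public class ValueAdaptSketch {

  static Object adapt(Object value) {
    return switch (value) {
      case null -> null;
      case Map<?, ?> map -> {
        var ret = new LinkedHashMap<Object, Object>();
        map.forEach((k, v) -> ret.put(k, adapt(v)));
        yield ret;
      }
      case Collection<?> collection -> {
        var ret = new ArrayList<Object>();
        for (Object item : collection) {
          ret.add(adapt(item));
        }
        yield ret;
      }
      default -> value; // scalars are returned unchanged
    };
  }

  public static void main(String[] args) {
    System.out.println(adapt(Map.of("radius", List.of(1, 2), "label", "text")));
  }
}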
ret.setLength(functionCallSyntax.getCodeLength()); + ret.setOffsetInDocument(functionCallSyntax.getCodeOffset()); + ret.setNamespace(namespace); + ret.setProjectName(projectName); + ret.setUrn(functionCallSyntax.getName()); + ret.setSourceCode(functionCallSyntax.encode()); + + for (String key : functionCallSyntax.getArguments().keySet()) { + ret.getParameters() + .put( + key, + adaptValue( + functionCallSyntax.getArguments().get(key), + namespace, + projectName, + documentClass)); } - private KimObservationStrategy adaptStrategy(ObservationStrategySyntax strategy, String namespace, - String projectName) { - - var ret = new KimObservationStrategyImpl(); - - ret.setRank(strategy.getRank()); - ret.setNamespace(namespace); - ret.setUrn(strategy.getName()); - ret.setDescription(strategy.getDescription()); - ret.setOffsetInDocument(strategy.getCodeOffset()); - ret.setLength(strategy.getCodeLength()); - ret.setDeprecation(strategy.getDeprecation()); - ret.setDeprecated(strategy.getDeprecation() != null); - ret.setProjectName(projectName); - ret.setDocumentClass(KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT); - - // these are multiple 'for' statements - for (var filter : strategy.getFilters()) { + // TODO unnamed parameters, annotations and all that - List filters = new ArrayList<>(); + return ret; + } - // and these are comma-separated filters in a 'for' - for (var match : filter.getMatch()) { + private KimObservationStrategy adaptStrategy( + ObservationStrategySyntax strategy, String namespace, String projectName) { - var f = new KimObservationStrategyImpl.FilterImpl(); - f.setNegated(match.isNegated()); - if (match.getObservable() != null /* which it should */) { - f.setMatch(adaptSemantics(match.getObservable(), namespace, projectName, - KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); - } + var ret = new KimObservationStrategyImpl(); - for (var condition : match.getConditions()) { - f.getFunctions().add(adaptServiceCall(condition, namespace, projectName, - KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT)); - } + ret.setRank(strategy.getRank()); + ret.setNamespace(namespace); + ret.setUrn(strategy.getName()); + ret.setDescription(strategy.getDescription()); + ret.setOffsetInDocument(strategy.getCodeOffset()); + ret.setLength(strategy.getCodeLength()); + ret.setDeprecation(strategy.getDeprecation()); + ret.setDeprecated(strategy.getDeprecation() != null); + ret.setProjectName(projectName); + ret.setDocumentClass(KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT); - f.setConnectorToPrevious(match.getConnectorToPrevious() == SemanticSyntax.Quantifier.ALL ? 
- LogicalConnector.INTERSECTION : LogicalConnector.UNION); + // these are multiple 'for' statements + for (var filter : strategy.getFilters()) { - filters.add(f); - } + List filters = new ArrayList<>(); - ret.getFilters().add(filters); + // and these are comma-separated filters in a 'for' + for (var match : filter.getMatch()) { - } - for (var operation : strategy.getOperations()) { - var o = new KimObservationStrategyImpl.OperationImpl(); - if (operation.getType() != null) { - o.setType(KimObservationStrategy.Operation.Type.valueOf(operation.getType().name())); - } - if (operation.getObservable() != null) { - o.setObservable(adaptObservable(operation.getObservable(), strategy.getName(), projectName, - KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT)); - } - if (!operation.getFunctions().isEmpty()) { - o.getFunctions().addAll(operation.getFunctions().stream().map(f -> adaptServiceCall(f, - namespace, projectName, KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)).toList()); - } - if (!operation.getDeferredStrategies().isEmpty()) { - o.getDeferredStrategies().addAll(operation.getDeferredStrategies().stream().map(s -> adaptStrategy(s, namespace, projectName)).toList()); - } - ret.getOperations().add(o); + var f = new KimObservationStrategyImpl.FilterImpl(); + f.setNegated(match.isNegated()); + if (match.getObservable() != null /* which it should */) { + f.setMatch( + adaptSemantics( + match.getObservable(), + namespace, + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); } - for (var let : strategy.getMacroVariables().keySet()) { - var f = new KimObservationStrategyImpl.FilterImpl(); - String key = null; - if (let.isIdentifier()) { - key = let.toString(); - } else if (let.getPod() instanceof List list) { - key = Utils.Strings.join(list, ","); - } - if (key == null) { - ret.getNotifications().add(Notification.error("unrecognized argument for let statement", - let)); - continue; - } - - var filter = strategy.getMacroVariables().get(let); - if (filter.getObservable() != null) { - f.setMatch(adaptSemantics(filter.getObservable(), namespace, projectName, - KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); - } - for (var condition : filter.getConditions()) { - f.getFunctions().add(adaptServiceCall(condition, namespace, projectName, - KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); - } - ret.getMacroVariables().put(key, f); + for (var condition : match.getConditions()) { + f.getFunctions() + .add( + adaptServiceCall( + condition, + namespace, + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT)); } - return ret; - } - public KimOntology adaptOntology(OntologySyntax ontology, String projectName, - Collection notifications) { + f.setConnectorToPrevious( + match.getConnectorToPrevious() == SemanticSyntax.Quantifier.ALL + ? 
LogicalConnector.INTERSECTION + : LogicalConnector.UNION); - KimOntologyImpl ret = new KimOntologyImpl(); + filters.add(f); + } - ret.setUrn(ontology.getName()); - ret.getImportedOntologies().addAll(ontology.getImportedOntologies()); - ret.setSourceCode(ontology.getSourceCode()); - ret.getMetadata().put(Metadata.DC_COMMENT, ontology.getDescription()); - ret.setVersion(Version.create(ontology.getVersion())); - ret.setProjectName(projectName); - - if (ontology.getDomain() == OntologySyntax.rootDomain) { - ret.setDomain(KimOntology.rootDomain); - for (var owlImport : ontology.getImportedCoreOntologies().keySet()) { - ret.getOwlImports().add(Pair.of(owlImport, - ontology.getImportedCoreOntologies().get(owlImport))); - } - } else { - ret.setDomain(adaptSemantics(ontology.getDomain(), ontology.getName(), projectName, - KlabAsset.KnowledgeClass.ONTOLOGY)); - } - - for (var definition : ontology.getConceptDeclarations()) { - ret.getStatements().add(adaptConceptDefinition(definition, ontology.getName(), projectName)); - } + ret.getFilters().add(filters); + } + for (var operation : strategy.getOperations()) { + var o = new KimObservationStrategyImpl.OperationImpl(); + if (operation.getType() != null) { + o.setType(KimObservationStrategy.Operation.Type.valueOf(operation.getType().name())); + } + if (operation.getObservable() != null) { + o.setObservable( + adaptObservable( + operation.getObservable(), + strategy.getName(), + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY_DOCUMENT)); + } + if (!operation.getFunctions().isEmpty()) { + o.getFunctions() + .addAll( + operation.getFunctions().stream() + .map( + f -> + adaptServiceCall( + f, + namespace, + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)) + .toList()); + } + if (!operation.getDeferredStrategies().isEmpty()) { + o.getDeferredStrategies() + .addAll( + operation.getDeferredStrategies().stream() + .map(s -> adaptStrategy(s, namespace, projectName)) + .toList()); + } + ret.getOperations().add(o); + } - ret.getNotifications().addAll(notifications); + for (var let : strategy.getMacroVariables().keySet()) { + var f = new KimObservationStrategyImpl.FilterImpl(); + String key = null; + if (let.isIdentifier()) { + key = let.toString(); + } else if (let.getPod() instanceof List list) { + key = Utils.Strings.join(list, ","); + } + if (key == null) { + ret.getNotifications() + .add(Notification.error("unrecognized argument for let statement", let)); + continue; + } + + var filter = strategy.getMacroVariables().get(let); + if (filter.getObservable() != null) { + f.setMatch( + adaptSemantics( + filter.getObservable(), + namespace, + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); + } + for (var condition : filter.getConditions()) { + f.getFunctions() + .add( + adaptServiceCall( + condition, + namespace, + projectName, + KlabAsset.KnowledgeClass.OBSERVATION_STRATEGY)); + } + ret.getMacroVariables().put(key, f); + } + return ret; + } + + public KimOntology adaptOntology( + OntologySyntax ontology, String projectName, Collection notifications) { + + KimOntologyImpl ret = new KimOntologyImpl(); + + ret.setUrn(ontology.getName()); + ret.getImportedOntologies().addAll(ontology.getImportedOntologies()); + ret.setSourceCode(ontology.getSourceCode()); + ret.getMetadata().put(Metadata.DC_COMMENT, ontology.getDescription()); + ret.setVersion(Version.create(ontology.getVersion())); + ret.setProjectName(projectName); + + if (ontology.getDomain() == OntologySyntax.rootDomain) { + ret.setDomain(KimOntology.rootDomain); + for 
(var owlImport : ontology.getImportedCoreOntologies().keySet()) { + ret.getOwlImports() + .add(Pair.of(owlImport, ontology.getImportedCoreOntologies().get(owlImport))); + } + } else { + ret.setDomain( + adaptSemantics( + ontology.getDomain(), + ontology.getName(), + projectName, + KlabAsset.KnowledgeClass.ONTOLOGY)); + } - return ret; + for (var definition : ontology.getConceptDeclarations()) { + ret.getStatements().add(adaptConceptDefinition(definition, ontology.getName(), projectName)); } - private KimConceptStatement adaptConceptDefinition(ConceptDeclarationSyntax definition, - String namespace, String projectName) { - - KimConceptStatementImpl ret = new KimConceptStatementImpl(); - - ret.setUrn(definition.getName()); - ret.setNamespace(namespace); - ret.setAbstract(definition.isAbstract()); - ret.setSealed(definition.isSealed()); - ret.setSubjective(definition.isSubjective()); - ret.setDocstring(definition.getDescription()); - ret.setAlias(definition.isAlias()); - ret.setOffsetInDocument(definition.getCodeOffset()); - ret.setLength(definition.getCodeLength()); - ret.setDeprecation(definition.getDeprecation()); - ret.setDeprecated(definition.getDeprecation() != null); - ret.setProjectName(projectName); - ret.setType(adaptSemanticType(definition.getDeclaredType())); - ret.setDocumentClass(KlabAsset.KnowledgeClass.ONTOLOGY); - - if (definition.isDeniable()) { - ret.getType().add(SemanticType.DENIABLE); - } - if (definition.isAbstract()) { - ret.getType().add(SemanticType.ABSTRACT); - } - if (definition.isSealed()) { - ret.getType().add(SemanticType.SEALED); - } - if (definition.isSubjective()) { - ret.getType().add(SemanticType.SUBJECTIVE); - } + ret.getNotifications().addAll(notifications); + + return ret; + } + + private KimConceptStatement adaptConceptDefinition( + ConceptDeclarationSyntax definition, String namespace, String projectName) { + + KimConceptStatementImpl ret = new KimConceptStatementImpl(); + + ret.setUrn(definition.getName()); + ret.setNamespace(namespace); + ret.setAbstract(definition.isAbstract()); + ret.setSealed(definition.isSealed()); + ret.setSubjective(definition.isSubjective()); + ret.setDocstring(definition.getDescription()); + ret.setAlias(definition.isAlias()); + ret.setOffsetInDocument(definition.getCodeOffset()); + ret.setLength(definition.getCodeLength()); + ret.setDeprecation(definition.getDeprecation()); + ret.setDeprecated(definition.getDeprecation() != null); + ret.setProjectName(projectName); + ret.setType(adaptSemanticType(definition.getDeclaredType())); + ret.setDocumentClass(KlabAsset.KnowledgeClass.ONTOLOGY); + + if (definition.isDeniable()) { + ret.getType().add(SemanticType.DENIABLE); + } + if (definition.isAbstract()) { + ret.getType().add(SemanticType.ABSTRACT); + } + if (definition.isSealed()) { + ret.getType().add(SemanticType.SEALED); + } + if (definition.isSubjective()) { + ret.getType().add(SemanticType.SUBJECTIVE); + } - if (definition.isCoreDeclaration()) { - ret.setUpperConceptDefined(definition.getDeclaredParent().encode()); - } else { - ret.setDeclaredParent(definition.getDeclaredParent() == null ? 
null : - adaptSemantics(definition.getDeclaredParent(), namespace, projectName, - KlabAsset.KnowledgeClass.ONTOLOGY)); - if (ret.getDeclaredParent() != null && definition.isGenericQuality()) { - ret.getType().clear(); - ret.getType().addAll(ret.getDeclaredParent().getType()); - } - } - for (var child : definition.getChildren()) { - ret.getChildren().add(adaptConceptDefinition(child, namespace, projectName)); - } - return ret; + if (definition.isCoreDeclaration()) { + ret.setUpperConceptDefined(definition.getDeclaredParent().encode()); + } else { + ret.setDeclaredParent( + definition.getDeclaredParent() == null + ? null + : adaptSemantics( + definition.getDeclaredParent(), + namespace, + projectName, + KlabAsset.KnowledgeClass.ONTOLOGY)); + if (ret.getDeclaredParent() != null && definition.isGenericQuality()) { + ret.getType().clear(); + ret.getType().addAll(ret.getDeclaredParent().getType()); + } + } + for (var child : definition.getChildren()) { + ret.getChildren().add(adaptConceptDefinition(child, namespace, projectName)); } + return ret; + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/WorldviewValidationScope.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/WorldviewValidationScope.java index 7863c3b3f..c9e23837f 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/WorldviewValidationScope.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/lang/WorldviewValidationScope.java @@ -13,140 +13,147 @@ import java.util.HashSet; import java.util.Set; -/** - * Worldview-aware semantic validation scope - */ +/** Worldview-aware semantic validation scope */ public class WorldviewValidationScope extends BasicObservableValidationScope { - public WorldviewValidationScope() { - } + public WorldviewValidationScope() {} - public WorldviewValidationScope(Worldview worldview) { - for (var ontology : worldview.getOntologies()) { - for (var statement : ontology.getStatements()) { - loadConcepts(statement, ontology.getUrn()); - } - } + public WorldviewValidationScope(Worldview worldview) { + for (var ontology : worldview.getOntologies()) { + for (var statement : ontology.getStatements()) { + loadConcepts(statement, ontology.getUrn()); + } } + } - public void clearNamespace(String namespace) { - Set keys = new HashSet<>(); - String ns = namespace + ":"; - for (var concept : conceptTypes.keySet()) { - if (concept.startsWith(ns)) { - keys.add(concept); - } - } - synchronized (conceptTypes) { - keys.forEach(key -> conceptTypes.remove(key)); - } + public void clearNamespace(String namespace) { + Set keys = new HashSet<>(); + String ns = namespace + ":"; + for (var concept : conceptTypes.keySet()) { + if (concept.startsWith(ns)) { + keys.add(concept); + } } - - public void addNamespace(KimOntology ontology) { - for (var statement : ontology.getStatements()) { - loadConcepts(statement, ontology.getUrn()); - } + synchronized (conceptTypes) { + keys.forEach(key -> conceptTypes.remove(key)); } + } - @Override - public ConceptDescriptor createConceptDescriptor(ConceptDeclarationSyntax declaration) { - // trust the "is core" to define the type for all core ontology concepts + public void addNamespace(KimOntology ontology) { + for (var statement : ontology.getStatements()) { + loadConcepts(statement, ontology.getUrn()); + } + } - if (declaration.isCoreDeclaration()) { - SemanticSyntax coreConcept = 
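Both adaptConceptDefinition above and loadConcepts below walk the concept tree recursively: each declaration or statement is converted and then the same conversion is applied to its children with the same namespace and project. A stand-in sketch of that tree walk (simplified records, not the k.LAB syntax or statement types):

import java.util.ArrayList;
import java.util.List;

// Stand-in sketch of the recursive shape shared by the concept adapters:
// convert the node, then adapt each child with the same namespace.
public class ConceptTreeSketch {

  record Declaration(String name, List<Declaration> children) {}

  record Statement(String urn, String namespace, List<Statement> children) {}

  static Statement adapt(Declaration declaration, String namespace) {
    var children = new ArrayList<Statement>();
    for (var child : declaration.children()) {
      children.add(adapt(child, namespace)); // same recursion as the real adapters
    }
    return new Statement(declaration.name(), namespace, children);
  }

  public static void main(String[] args) {
    var leaf = new Declaration("Child", List.of());
    System.out.println(adapt(new Declaration("Parent", List.of(leaf)), "my.ontology"));
  }
}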
declaration.getDeclaredParent(); - var coreId = coreConcept.encode(); - var cname = coreConcept.encode().split(":"); + @Override + public ConceptDescriptor createConceptDescriptor(ConceptDeclarationSyntax declaration) { + // trust the "is core" to define the type for all core ontology concepts - this.conceptTypes.put(coreConcept.encode(), new ConceptDescriptor(cname[0], cname[1], - declaration.getDeclaredType(), coreConcept.encode(), - "Core concept " + coreConcept.encode() + " for type " + declaration.getDeclaredType(), - true, false)); - } - return super.createConceptDescriptor(declaration); - } + if (declaration.isCoreDeclaration()) { + SemanticSyntax coreConcept = declaration.getDeclaredParent(); + var coreId = coreConcept.encode(); + var cname = coreConcept.encode().split(":"); - private void loadConcepts(KimConceptStatement statement, String namespace) { - String defaultLabel = namespace + ":" + statement.getUrn(); - ConceptDescriptor descriptor = new ConceptDescriptor(namespace, statement.getUrn(), - getMainType(statement.getType()), statement.getMetadata().get(Metadata.DC_LABEL, - defaultLabel), statement.getMetadata().get(Metadata.DC_COMMENT, String.class), - statement.isAbstract(), false); - this.conceptTypes.put(defaultLabel, descriptor); - for (var child : statement.getChildren()) { - loadConcepts(child, namespace); - } + this.conceptTypes.put( + coreConcept.encode(), + new ConceptDescriptor( + cname[0], + cname[1], + declaration.getDeclaredType(), + coreConcept.encode(), + "Core concept " + coreConcept.encode() + " for type " + declaration.getDeclaredType(), + true, + false)); } + return super.createConceptDescriptor(declaration); + } - /** - * Turn the K.IM main semantic typeset into its syntactic equivalent. No VOID is allowed if a worldview is - * loaded. 
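getMainType strips the full typeset down to the declarable constants and maps the single survivor; when nothing declarable remains but QUALITY is present it falls back to GENERIC_QUALITY, otherwise to NOTHING. A minimal sketch of the stripping step with a stand-in enum (the real code intersects with SemanticType.DECLARABLE_TYPES):

import java.util.EnumSet;
import java.util.Set;

// Stand-in sketch of the stripping step: intersect the typeset with the
// declarable constants and take the single survivor. The real method then
// maps the survivor or falls back to GENERIC_QUALITY or NOTHING.
public class MainTypeSketch {

  enum T { OBSERVABLE, QUANTIFIABLE, QUALITY, LENGTH, MASS }

  static final Set<T> DECLARABLE = EnumSet.of(T.LENGTH, T.MASS);

  static T mainType(Set<T> type) {
    var stripped = EnumSet.copyOf(type);
    stripped.retainAll(DECLARABLE); // keep only declarable constants
    return stripped.size() == 1 ? stripped.iterator().next() : null;
  }

  public static void main(String[] args) {
    // {OBSERVABLE, QUANTIFIABLE, QUALITY, LENGTH} maps to LENGTH
    System.out.println(mainType(EnumSet.of(T.OBSERVABLE, T.QUANTIFIABLE, T.QUALITY, T.LENGTH)));
  }
}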
- * - * @param type - * @return - */ - public static SemanticSyntax.Type getMainType(Set type) { - Set strippedType = EnumSet.copyOf(type); - strippedType.retainAll(SemanticType.DECLARABLE_TYPES); - if (strippedType.isEmpty() && type.contains(SemanticType.QUALITY)) { - // shouldn't happen in KimDeclarations - return SemanticSyntax.Type.GENERIC_QUALITY; - } else if (strippedType.size() == 1) { - return getMainType(strippedType.iterator().next()); - } - // No VOID admitted if we have a worldview - return SemanticSyntax.Type.NOTHING; + private void loadConcepts(KimConceptStatement statement, String namespace) { + String defaultLabel = namespace + ":" + statement.getUrn(); + ConceptDescriptor descriptor = + new ConceptDescriptor( + namespace, + statement.getUrn(), + getMainType(statement.getType()), + statement.getMetadata().get(Metadata.DC_LABEL, defaultLabel), + statement.getMetadata().get(Metadata.DC_COMMENT, String.class), + statement.isAbstract(), + false); + this.conceptTypes.put(defaultLabel, descriptor); + for (var child : statement.getChildren()) { + loadConcepts(child, namespace); } + } - public static SemanticSyntax.Type getMainType(SemanticType type) { - return switch (type) { - case PROPORTION -> SemanticSyntax.Type.PROPORTION; - case PROBABILITY -> SemanticSyntax.Type.PROBABILITY; - case DISTANCE, LENGTH -> SemanticSyntax.Type.LENGTH; - case VALUE -> SemanticSyntax.Type.VALUE; - case OCCURRENCE -> SemanticSyntax.Type.OCCURRENCE; - case PRESENCE -> SemanticSyntax.Type.PRESENCE; - case UNCERTAINTY -> SemanticSyntax.Type.UNCERTAINTY; - case NUMEROSITY -> SemanticSyntax.Type.NUMEROSITY; - case RATE -> SemanticSyntax.Type.RATE; - case CLASS -> SemanticSyntax.Type.CLASS; - case QUANTITY -> SemanticSyntax.Type.QUANTITY; - case ENERGY -> SemanticSyntax.Type.ENERGY; - case ENTROPY -> SemanticSyntax.Type.ENTROPY; - case ROLE -> SemanticSyntax.Type.ROLE; - case EXTENT -> SemanticSyntax.Type.EXTENT; - case MONETARY_VALUE -> SemanticSyntax.Type.MONETARY_VALUE; - case DOMAIN -> SemanticSyntax.Type.DOMAIN; - case MASS -> SemanticSyntax.Type.MASS; - case VOLUME -> SemanticSyntax.Type.VOLUME; - case WEIGHT -> SemanticSyntax.Type.WEIGHT; - case MONEY -> SemanticSyntax.Type.MONEY; - case DURATION -> SemanticSyntax.Type.DURATION; - case AREA -> SemanticSyntax.Type.AREA; - case ACCELERATION -> SemanticSyntax.Type.ACCELERATION; - case PRIORITY -> SemanticSyntax.Type.PRIORITY; - case ELECTRIC_POTENTIAL -> SemanticSyntax.Type.ELECTRIC_POTENTIAL; - case CHARGE -> SemanticSyntax.Type.CHARGE; - case RESISTANCE -> SemanticSyntax.Type.RESISTANCE; - case RESISTIVITY -> SemanticSyntax.Type.RESISTIVITY; - case PRESSURE -> SemanticSyntax.Type.PRESSURE; - case ANGLE -> SemanticSyntax.Type.ANGLE; - case VELOCITY -> SemanticSyntax.Type.VELOCITY; - case TEMPERATURE -> SemanticSyntax.Type.TEMPERATURE; - case VISCOSITY -> SemanticSyntax.Type.VISCOSITY; - case RATIO -> SemanticSyntax.Type.RATIO; - case AMOUNT -> SemanticSyntax.Type.AMOUNT; - case SUBJECT -> SemanticSyntax.Type.SUBJECT; - case AGENT -> SemanticSyntax.Type.AGENT; - case EVENT -> SemanticSyntax.Type.EVENT; - case RELATIONSHIP -> SemanticSyntax.Type.FUNCTIONAL_RELATIONSHIP; - case PROCESS -> SemanticSyntax.Type.PROCESS; - case CONFIGURATION -> SemanticSyntax.Type.CONFIGURATION; - case ATTRIBUTE -> SemanticSyntax.Type.ATTRIBUTE; - case REALM -> SemanticSyntax.Type.REALM; - case IDENTITY -> SemanticSyntax.Type.IDENTITY; - case ORDERING -> SemanticSyntax.Type.ORDERING; - default -> SemanticSyntax.Type.NOTHING; - }; + /** + * Turn the K.IM main semantic 
typeset into its syntactic equivalent. No VOID is allowed if a + * worldview is loaded. + * + * @param type + * @return + */ + public static SemanticSyntax.Type getMainType(Set type) { + Set strippedType = EnumSet.copyOf(type); + strippedType.retainAll(SemanticType.DECLARABLE_TYPES); + if (strippedType.isEmpty() && type.contains(SemanticType.QUALITY)) { + // shouldn't happen in KimDeclarations + return SemanticSyntax.Type.GENERIC_QUALITY; + } else if (strippedType.size() == 1) { + return getMainType(strippedType.iterator().next()); } + // No VOID admitted if we have a worldview + return SemanticSyntax.Type.NOTHING; + } + public static SemanticSyntax.Type getMainType(SemanticType type) { + return switch (type) { + case PROPORTION -> SemanticSyntax.Type.PROPORTION; + case PROBABILITY -> SemanticSyntax.Type.PROBABILITY; + case DISTANCE, LENGTH -> SemanticSyntax.Type.LENGTH; + case VALUE -> SemanticSyntax.Type.VALUE; + case OCCURRENCE -> SemanticSyntax.Type.OCCURRENCE; + case PRESENCE -> SemanticSyntax.Type.PRESENCE; + case UNCERTAINTY -> SemanticSyntax.Type.UNCERTAINTY; + case NUMEROSITY -> SemanticSyntax.Type.NUMEROSITY; + case RATE -> SemanticSyntax.Type.RATE; + case CLASS -> SemanticSyntax.Type.CLASS; + case QUANTITY -> SemanticSyntax.Type.QUANTITY; + case ENERGY -> SemanticSyntax.Type.ENERGY; + case ENTROPY -> SemanticSyntax.Type.ENTROPY; + case ROLE -> SemanticSyntax.Type.ROLE; + case EXTENT -> SemanticSyntax.Type.EXTENT; + case MONETARY_VALUE -> SemanticSyntax.Type.MONETARY_VALUE; + case DOMAIN -> SemanticSyntax.Type.DOMAIN; + case MASS -> SemanticSyntax.Type.MASS; + case VOLUME -> SemanticSyntax.Type.VOLUME; + case WEIGHT -> SemanticSyntax.Type.WEIGHT; + case MONEY -> SemanticSyntax.Type.MONEY; + case DURATION -> SemanticSyntax.Type.DURATION; + case AREA -> SemanticSyntax.Type.AREA; + case ACCELERATION -> SemanticSyntax.Type.ACCELERATION; + case PRIORITY -> SemanticSyntax.Type.PRIORITY; + case ELECTRIC_POTENTIAL -> SemanticSyntax.Type.ELECTRIC_POTENTIAL; + case CHARGE -> SemanticSyntax.Type.CHARGE; + case RESISTANCE -> SemanticSyntax.Type.RESISTANCE; + case RESISTIVITY -> SemanticSyntax.Type.RESISTIVITY; + case PRESSURE -> SemanticSyntax.Type.PRESSURE; + case ANGLE -> SemanticSyntax.Type.ANGLE; + case VELOCITY -> SemanticSyntax.Type.VELOCITY; + case TEMPERATURE -> SemanticSyntax.Type.TEMPERATURE; + case VISCOSITY -> SemanticSyntax.Type.VISCOSITY; + case RATIO -> SemanticSyntax.Type.RATIO; + case AMOUNT -> SemanticSyntax.Type.AMOUNT; + case SUBJECT -> SemanticSyntax.Type.SUBJECT; + case AGENT -> SemanticSyntax.Type.AGENT; + case EVENT -> SemanticSyntax.Type.EVENT; + case RELATIONSHIP -> SemanticSyntax.Type.FUNCTIONAL_RELATIONSHIP; + case PROCESS -> SemanticSyntax.Type.PROCESS; + case CONFIGURATION -> SemanticSyntax.Type.CONFIGURATION; + case ATTRIBUTE -> SemanticSyntax.Type.ATTRIBUTE; + case REALM -> SemanticSyntax.Type.REALM; + case IDENTITY -> SemanticSyntax.Type.IDENTITY; + case ORDERING -> SemanticSyntax.Type.ORDERING; + default -> SemanticSyntax.Type.NOTHING; + }; + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/library/ProjectLibrary.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/library/ProjectLibrary.java index d084907b4..40959e4c1 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/library/ProjectLibrary.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/library/ProjectLibrary.java @@ 
-10,8 +10,10 @@ import org.integratedmodelling.klab.api.services.runtime.extension.Library; import org.integratedmodelling.klab.services.base.BaseService; -@Library(name = "project", description = "Import and export of k.IM projects", version = - Version.CURRENT) +@Library( + name = "project", + description = "Import and export of k.IM projects", + version = Version.CURRENT) public class ProjectLibrary { @Importer(schema = "git", knowledgeClass = KlabAsset.KnowledgeClass.COMPONENT, diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelKbox.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelKbox.java index 84de0a18b..8dd82225f 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelKbox.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelKbox.java @@ -39,865 +39,945 @@ public class ModelKbox extends ObservableKbox { - // private boolean workRemotely = !Configuration.INSTANCE.isOffline(); - private boolean initialized = false; - - /** - * Create a kbox with the passed name. If the kbox exists, open it and return it. - * - * @param service - * @return a new kbox - */ - public static ModelKbox create(ResourcesService service) { - return new ModelKbox(service); - } + // private boolean workRemotely = !Configuration.INSTANCE.isOffline(); + private boolean initialized = false; - private ModelKbox(ResourcesService service) { - super(service.getLocalName(), service.serviceScope()); - this.resourceService = service; - } + /** + * Create a kbox with the passed name. If the kbox exists, open it and return it. + * + * @param service + * @return a new kbox + */ + public static ModelKbox create(ResourcesService service) { + return new ModelKbox(service); + } - @Override - protected void initialize(Channel monitor) { - - if (!initialized) { - - initialized = true; - - setSchema(ModelReference.class, new Schema() { - - @Override - public String getTableName() { - return getMainTableId(); - } - - @Override - public String getCreateSQL() { - String ret = "CREATE TABLE model (" + "oid LONG, " + "serverid VARCHAR(64), " + "id " + - "VARCHAR(256), " - + "name VARCHAR(256), " + "namespaceid VARCHAR(128), " + "projectid VARCHAR" + - "(128), " + - "typeid LONG, " - + "otypeid LONG, " + "scope VARCHAR(16), " + "isresolved BOOLEAN, " + - "isreification " + - "BOOLEAN, " - + "inscenario BOOLEAN, " + "hasdirectobjects BOOLEAN, " + "hasdirectdata " + - "BOOLEAN, " - + "timestart LONG, " + "timeend LONG, " + "isspatial BOOLEAN, " + "istemporal " + - "BOOLEAN, " - + "timemultiplicity LONG, " + "spacemultiplicity LONG, " + "scalemultiplicity " + - "LONG, " - + "dereifyingattribute VARCHAR(256), " + "minspatialscale INTEGER, " + - "maxspatialscale " + - "INTEGER, " - + "mintimescale INTEGER, " + "maxtimescale INTEGER, " + "space GEOMETRY, " - + "observationtype VARCHAR(256), " + "enumeratedspacedomain VARCHAR(256), " - + "enumeratedspacelocation VARCHAR(1024), " + "specializedObservable BOOLEAN " + "); " - + "CREATE INDEX model_oid_index ON model(oid); " - // + "CREATE SPATIAL INDEX model_space ON model(space);" - ; - - return ret; - - } - }); - - setSerializer(ModelReference.class, new Serializer() { - - private String cn(Object o) { - return o == null ? 
"" : o.toString(); - } - - @Override - public String serialize(ModelReference model, long primaryKey, long foreignKey) { - - long tid = requireConceptId(model.getObservableConcept(), monitor); - - String ret = - "INSERT INTO model VALUES (" + primaryKey + ", " + "'" + cn(model.getServerId()) + - "', " + "'" - + cn(model.getName()) + "', " + "'" + cn( - model.getName()) + "', " + "'" + cn(model.getNamespaceId()) - + "', " + "'" + cn(model.getProjectId()) + "', " + tid + ", " - + /* observation concept is obsolete oid - */ 0 + ", '" + (model.getScope().name()) + "', " - + (model.isResolved() ? "TRUE" : "FALSE") + ", " + (model.isReification() ? - "TRUE" : - "FALSE") + ", " - + (model.isInScenario() ? "TRUE" : "FALSE") + ", " + (model.isHasDirectObjects() ? - "TRUE" : "FALSE") - + ", " + (model.isHasDirectData() ? "TRUE" : "FALSE") + ", " + model.getTimeStart() + ", " - + model.getTimeEnd() + ", " + (model.isSpatial() ? "TRUE" : "FALSE") + - ", " - + (model.isTemporal() ? "TRUE" : "FALSE") + ", " + model.getTimeMultiplicity() + ", " - + model.getSpaceMultiplicity() + ", " + model.getScaleMultiplicity() + - ", " + "'" - + cn( - model.getDereifyingAttribute()) + "', " + model.getMinSpatialScaleFactor() + ", " - + model.getMaxSpatialScaleFactor() + ", " + model.getMinTimeScaleFactor() + ", " - + model.getMaxTimeScaleFactor() + ", " + "'" - + (model.getShape() == null - ? "GEOMETRYCOLLECTION EMPTY" - : - ShapeImpl.promote( - model.getShape()).getStandardizedGeometry().toString()) - + "', '" + model.getObservationType() + "', '" + cn( - model.getEnumeratedSpaceDomain()) + - "', '" - + cn(model.getEnumeratedSpaceLocation()) + "', " - + (model.isSpecializedObservable() ? "TRUE" : "FALSE") + ");"; - - if (model.getMetadata() != null && model.getMetadata().size() > 0) { - storeMetadataFor(primaryKey, model.getMetadata()); - } - - return ret; - } - }); + private ModelKbox(ResourcesService service) { + super(service.getLocalName(), service.serviceScope()); + this.resourceService = service; + } - } - } + @Override + protected void initialize(Channel monitor) { - /** - * Pass the output of queryModelData to a contextual prioritizer and return the ranked list of IModels. If - * we're a personal engine, also broadcast the query to the network and merge results before returning. - * - * @param observable - * @param scope - * @return models resulting from query, best first. - */ - public Collection query(Observable observable, ContextScope scope) { + if (!initialized) { - initialize(scope); + initialized = true; - Set local = new LinkedHashSet<>(); - /* - * only query locally if we've seen a model before. - */ - if (database.hasTable("model")) { - for (ModelReference md : queryModels(observable, scope)) { - if (md.getPermissions().checkAuthorization(scope)) { - local.add(md); - } - } - } - return local; - } - - /** - * Find and deserialize all modeldata matching the parameters. Do not rank or anything. 
- * - * @param observable - * @param context - * @return all unranked model descriptors matching the query - */ - public List queryModels(Observable observable, ContextScope context) { + setSchema( + ModelReference.class, + new Schema() { - List ret = new ArrayList<>(); - - if (!database.hasTable("model")) { - return ret; - } - - var geometry = ContextScope.getResolutionGeometry(context); - if (geometry == null || geometry.isEmpty()) { - return ret; - } + @Override + public String getTableName() { + return getMainTableId(); + } - var scale = Scale.create(geometry); - String query = "SELECT model.oid FROM model WHERE "; - Concept contextObservable = context.getContextObservation() == null - ? null - : context.getContextObservation().getObservable().getSemantics(); + @Override + public String getCreateSQL() { + String ret = + "CREATE TABLE model (" + + "oid LONG, " + + "serverid VARCHAR(64), " + + "id " + + "VARCHAR(256), " + + "name VARCHAR(256), " + + "namespaceid VARCHAR(128), " + + "projectid VARCHAR" + + "(128), " + + "typeid LONG, " + + "otypeid LONG, " + + "scope VARCHAR(16), " + + "isresolved BOOLEAN, " + + "isreification " + + "BOOLEAN, " + + "inscenario BOOLEAN, " + + "hasdirectobjects BOOLEAN, " + + "hasdirectdata " + + "BOOLEAN, " + + "timestart LONG, " + + "timeend LONG, " + + "isspatial BOOLEAN, " + + "istemporal " + + "BOOLEAN, " + + "timemultiplicity LONG, " + + "spacemultiplicity LONG, " + + "scalemultiplicity " + + "LONG, " + + "dereifyingattribute VARCHAR(256), " + + "minspatialscale INTEGER, " + + "maxspatialscale " + + "INTEGER, " + + "mintimescale INTEGER, " + + "maxtimescale INTEGER, " + + "space GEOMETRY, " + + "observationtype VARCHAR(256), " + + "enumeratedspacedomain VARCHAR(256), " + + "enumeratedspacelocation VARCHAR(1024), " + + "specializedObservable BOOLEAN " + + "); " + + "CREATE INDEX model_oid_index ON model(oid); " + // + "CREATE SPATIAL INDEX model_space ON model(space);" + ; + + return ret; + } + }); - String typequery = observableQuery(observable, contextObservable); - if (typequery == null) { - return ret; - } + setSerializer( + ModelReference.class, + new Serializer() { - query += "(" + scopeQuery(context, observable) + ")"; - query += " AND (" + typequery + ")"; - if (scale.getSpace() != null) { - String sq = spaceQuery(scale.getSpace()); - if (!sq.isEmpty()) { - query += " AND (" + sq + ")"; + private String cn(Object o) { + return o == null ? 
"" : o.toString(); } - } - - String tquery = timeQuery(scale.getTime()); - if (!tquery.isEmpty()) { - query += " AND (" + tquery + ");"; - } - // Logging.INSTANCE.info(query); - - final List oids = database.queryIds(query); - for (long l : oids) { - ModelReference model = retrieveModel(l, context); - if (model != null) { - Coverage coverage = resourceService.modelGeometry(model.getName()); - if (coverage != null && !coverage.checkConstraints(scale)) { - resourceService.serviceScope().debug("model " + model.getName() + " of " + observable - + " discarded because of coverage constraints mismatch"); - continue; - } - ret.add(model); + @Override + public String serialize(ModelReference model, long primaryKey, long foreignKey) { + + long tid = requireConceptId(model.getObservableConcept(), monitor); + + String ret = + "INSERT INTO model VALUES (" + + primaryKey + + ", " + + "'" + + cn(model.getServerId()) + + "', " + + "'" + + cn(model.getName()) + + "', " + + "'" + + cn(model.getName()) + + "', " + + "'" + + cn(model.getNamespaceId()) + + "', " + + "'" + + cn(model.getProjectId()) + + "', " + + tid + + ", " + + /* observation concept is obsolete oid + */ 0 + + ", '" + + (model.getScope().name()) + + "', " + + (model.isResolved() ? "TRUE" : "FALSE") + + ", " + + (model.isReification() ? "TRUE" : "FALSE") + + ", " + + (model.isInScenario() ? "TRUE" : "FALSE") + + ", " + + (model.isHasDirectObjects() ? "TRUE" : "FALSE") + + ", " + + (model.isHasDirectData() ? "TRUE" : "FALSE") + + ", " + + model.getTimeStart() + + ", " + + model.getTimeEnd() + + ", " + + (model.isSpatial() ? "TRUE" : "FALSE") + + ", " + + (model.isTemporal() ? "TRUE" : "FALSE") + + ", " + + model.getTimeMultiplicity() + + ", " + + model.getSpaceMultiplicity() + + ", " + + model.getScaleMultiplicity() + + ", " + + "'" + + cn(model.getDereifyingAttribute()) + + "', " + + model.getMinSpatialScaleFactor() + + ", " + + model.getMaxSpatialScaleFactor() + + ", " + + model.getMinTimeScaleFactor() + + ", " + + model.getMaxTimeScaleFactor() + + ", " + + "'" + + (model.getShape() == null + ? "GEOMETRYCOLLECTION EMPTY" + : ShapeImpl.promote(model.getShape()) + .getStandardizedGeometry() + .toString()) + + "', '" + + model.getObservationType() + + "', '" + + cn(model.getEnumeratedSpaceDomain()) + + "', '" + + cn(model.getEnumeratedSpaceLocation()) + + "', " + + (model.isSpecializedObservable() ? "TRUE" : "FALSE") + + ");"; + + if (model.getMetadata() != null && model.getMetadata().size() > 0) { + storeMetadataFor(primaryKey, model.getMetadata()); + } + + return ret; } + }); + } + } + + /** + * Pass the output of queryModelData to a contextual prioritizer and return the ranked list of + * IModels. If we're a personal engine, also broadcast the query to the network and merge results + * before returning. + * + * @param observable + * @param scope + * @return models resulting from query, best first. + */ + public Collection query(Observable observable, ContextScope scope) { + + initialize(scope); + + Set local = new LinkedHashSet<>(); + /* + * only query locally if we've seen a model before. + */ + if (database.hasTable("model")) { + for (ModelReference md : queryModels(observable, scope)) { + if (md.getPermissions().checkAuthorization(scope)) { + local.add(md); } + } + } + return local; + } + + /** + * Find and deserialize all modeldata matching the parameters. Do not rank or anything. 
+ * + * @param observable + * @param context + * @return all unranked model descriptors matching the query + */ + public List queryModels(Observable observable, ContextScope context) { + + List ret = new ArrayList<>(); + + if (!database.hasTable("model")) { + return ret; + } - resourceService.serviceScope().info( - "model query for " + observable.getDescriptionType().name().toLowerCase() + " of " - + observable + " found " + (ret.size() == 1 ? ret.get( - 0).getName() : (ret.size() + " models" - ))); + var geometry = ContextScope.getResolutionGeometry(context); + if (geometry == null || geometry.isEmpty()) { + return ret; + } - return ret; + var scale = Scale.create(geometry); + String query = "SELECT model.oid FROM model WHERE "; + Concept contextObservable = + context.getContextObservation() == null + ? null + : context.getContextObservation().getObservable().getSemantics(); + + String typequery = observableQuery(observable, contextObservable); + if (typequery == null) { + return ret; } - // private boolean isAuthorized(ModelReference model, IObservable observable, - // Set - // userPermissions, - // Collection constraints) { - // - // if (model.getProjectId() != null) { - // Set permissions = - // Authentication.INSTANCE.getProjectPermissions(model.getProjectId()); - // if (!permissions.isEmpty()) { - // if (Sets.intersection(permissions, userPermissions).size() == 0) { - // return false; - // } - // } - // } - // - // if (constraints != null) { - // for (IResolutionConstraint c : constraints) { - // KlabAsset m = resourceService.resolveAsset(model.getUrn()); - // if (m instanceof KimModelStatement) { - // if (!c.accepts((IModel) m, observable)) { - // return false; - // } - // } - // } - // } - // - // return true; - // } + query += "(" + scopeQuery(context, observable) + ")"; + query += " AND (" + typequery + ")"; + if (scale.getSpace() != null) { + String sq = spaceQuery(scale.getSpace()); + if (!sq.isEmpty()) { + query += " AND (" + sq + ")"; + } + } - private String observableQuery(Observable observable, Concept context) { + String tquery = timeQuery(scale.getTime()); + if (!tquery.isEmpty()) { + query += " AND (" + tquery + ");"; + } - Set ids = this.getCompatibleTypeIds(observable, context); - if (ids == null || ids.size() == 0) { - return null; + // Logging.INSTANCE.info(query); + + final List oids = database.queryIds(query); + for (long l : oids) { + ModelReference model = retrieveModel(l, context); + if (model != null) { + Coverage coverage = resourceService.modelGeometry(model.getName()); + if (coverage != null && !coverage.checkConstraints(scale)) { + resourceService + .serviceScope() + .debug( + "model " + + model.getName() + + " of " + + observable + + " discarded because of coverage constraints mismatch"); + continue; } - StringBuilder ret = new StringBuilder(); - for (long id : ids) { - ret.append((ret.length() == 0) ? "" : ", ").append(id); - } - return "typeid IN (" + ret + ")"; + ret.add(model); + } } - /* - * select models that are [instantiators if required] AND:] [private and in the home namespace - * if not dummy OR] [project private and in the home project if not dummy OR] (non-private and - * non-scenario) OR (in any of the scenarios in the context). 
- */ - private String scopeQuery(ContextScope context, Observable observable) { - - String ret = ""; - String projectId = null; - String namespaceId = context.getConstraint( - ResolutionConstraint.Type.ResolutionNamespace, - DUMMY_NAMESPACE_ID); - if (!namespaceId.equals(DUMMY_NAMESPACE_ID)) { - ret += "(model.namespaceid = '" + namespaceId + "')"; - projectId = context.getConstraint(ResolutionConstraint.Type.ResolutionProject, String.class); - } + resourceService + .serviceScope() + .info( + "model query for " + + observable.getDescriptionType().name().toLowerCase() + + " of " + + observable + + " found " + + (ret.size() == 1 ? ret.get(0).getName() : (ret.size() + " models"))); + + return ret; + } + + // private boolean isAuthorized(ModelReference model, IObservable observable, + // Set + // userPermissions, + // Collection constraints) { + // + // if (model.getProjectId() != null) { + // Set permissions = + // Authentication.INSTANCE.getProjectPermissions(model.getProjectId()); + // if (!permissions.isEmpty()) { + // if (Sets.intersection(permissions, userPermissions).size() == 0) { + // return false; + // } + // } + // } + // + // if (constraints != null) { + // for (IResolutionConstraint c : constraints) { + // KlabAsset m = resourceService.resolveAsset(model.getUrn()); + // if (m instanceof KimModelStatement) { + // if (!c.accepts((IModel) m, observable)) { + // return false; + // } + // } + // } + // } + // + // return true; + // } + + private String observableQuery(Observable observable, Concept context) { + + Set ids = this.getCompatibleTypeIds(observable, context); + if (ids == null || ids.size() == 0) { + return null; + } + StringBuilder ret = new StringBuilder(); + for (long id : ids) { + ret.append((ret.length() == 0) ? "" : ", ").append(id); + } + return "typeid IN (" + ret + ")"; + } + + /* + * select models that are [instantiators if required] AND:] [private and in the home namespace + * if not dummy OR] [project private and in the home project if not dummy OR] (non-private and + * non-scenario) OR (in any of the scenarios in the context). + */ + private String scopeQuery(ContextScope context, Observable observable) { + + String ret = ""; + String projectId = null; + String namespaceId = + context.getConstraint(ResolutionConstraint.Type.ResolutionNamespace, DUMMY_NAMESPACE_ID); + if (!namespaceId.equals(DUMMY_NAMESPACE_ID)) { + ret += "(model.namespaceid = '" + namespaceId + "')"; + projectId = context.getConstraint(ResolutionConstraint.Type.ResolutionProject, String.class); + } - ret += (ret.isEmpty() ? "" : " OR ") + "((NOT model.scope = 'NAMESPACE') AND (NOT model.inscenario))"; + ret += + (ret.isEmpty() ? 
"" : " OR ") + + "((NOT model.scope = 'NAMESPACE') AND (NOT model.inscenario))"; + + if (!context.getConstraints(ResolutionConstraint.Type.Scenarios, String.class).isEmpty()) { + ret += + " OR (" + + joinStringConditions( + "model.namespaceid", + context.getConstraints(ResolutionConstraint.Type.Scenarios, String.class), + "OR") + + ")"; + } - if (!context.getConstraints(ResolutionConstraint.Type.Scenarios, String.class).isEmpty()) { - ret += " OR (" + joinStringConditions( - "model.namespaceid", - context.getConstraints(ResolutionConstraint.Type.Scenarios, String.class), - "OR") + ")"; - } + if (observable.is(SemanticType.COUNTABLE)) { + if (observable.getDescriptionType().isInstantiation()) { + ret = "(" + ret + ") AND model.isreification"; + } else { + ret = "(" + ret + ") AND (NOT model.isreification)"; + } + } - if (observable.is(SemanticType.COUNTABLE)) { - if (observable.getDescriptionType().isInstantiation()) { - ret = "(" + ret + ") AND model.isreification"; - } else { - ret = "(" + ret + ") AND (NOT model.isreification)"; - } - } + if (projectId != null) { + ret += " AND (NOT (model.scope = 'PROJECT' AND model.projectid <> '" + projectId + "'))"; + } - if (projectId != null) { - ret += " AND (NOT (model.scope = 'PROJECT' AND model.projectid <> '" + projectId + "'))"; - } + return ret; + } + + /* + * select models that intersect the given space or have no space at all. TODO must match + * geometry when forced - if it has @intensive(space, time) it shouldn't match no space/time OR + * non-distributed space/time. ALSO the dimensionality! + */ + private String spaceQuery(Space space) { + + space = resolveEnumeratedExtensions(space); + + if (space instanceof EnumeratedExtension) { + // Accept anything that is from the same authority or baseconcept. If the + // requesting + // context needs specific values, these should be checked later in the + // prioritizer. + // Pair defs = ((EnumeratedExtension) + // space).getExtentDescriptors(); + // return "model.enumeratedspacedomain = '" + defs.getFirst() + "'"; + throw new KlabUnimplementedException("enumerated extension"); + } - return ret; + if (space.getShape().isEmpty()) { + return ""; } - /* - * select models that intersect the given space or have no space at all. TODO must match - * geometry when forced - if it has @intensive(space, time) it shouldn't match no space/time OR - * non-distributed space/time. ALSO the dimensionality! - */ - private String spaceQuery(Space space) { - - space = resolveEnumeratedExtensions(space); - - if (space instanceof EnumeratedExtension) { - // Accept anything that is from the same authority or baseconcept. If the - // requesting - // context needs specific values, these should be checked later in the - // prioritizer. - // Pair defs = ((EnumeratedExtension) - // space).getExtentDescriptors(); - // return "model.enumeratedspacedomain = '" + defs.getFirst() + "'"; - throw new KlabUnimplementedException("enumerated extension"); - } + String scalequery = + space.getRank() + " BETWEEN model.minspatialscale AND model.maxspatialscale"; + + String spacequery = + "model.space && '" + + ShapeImpl.promote(space.getGeometricShape()).getStandardizedGeometry() + + "' OR ST_IsEmpty(model.space)"; + + return "(" + scalequery + ") AND (" + spacequery + ")"; + } + + /* + * Entirely TODO. For initialization we should use time only to select for most current info - + * either closer to the context or to today if time is null. For dynamic models we should either + * not have a context or cover the context. 
Guess this is the job of the prioritizer, and we + * should simply let anything through except when we look for T1(n>1) models. + * + * TODO must match geometry when forced - if it has @intensive(space, time) it shouldn't match + * no space/time OR non-distributed space/time. ALSO the dimensionality! + */ + private String timeQuery(Time time) { + + time = resolveEnumeratedExtensions(time); + + if (time /* still */ instanceof EnumeratedExtension) { + // TODO + throw new KlabUnimplementedException("enumerated extension"); + } - if (space.getShape().isEmpty()) { - return ""; - } + String ret = ""; + boolean checkBoundaries = false; + if (time != null && checkBoundaries) { + ret = "(timestart == -1 AND timeend == -1) OR ("; + long start = time.getStart() == null ? -1 : time.getStart().getMilliseconds(); + long end = time.getEnd() == null ? -1 : time.getEnd().getMilliseconds(); + if (start > 0 && end > 0) { + ret += "timestart >= " + start + " AND timeend <= " + end; + } else if (start > 0) { + ret += "timestart >= " + start; + } else if (end > 0) { + ret += "timeend <= " + end; + } + ret += ")"; + } + return ret; + } - String scalequery = space.getRank() + " BETWEEN model.minspatialscale AND model.maxspatialscale"; + public List retrieveAll(Channel monitor) throws KlabException { - String spacequery = - "model.space && '" + ShapeImpl.promote(space.getGeometricShape()).getStandardizedGeometry() - + "' OR ST_IsEmpty(model.space)"; + initialize(monitor); - return "(" + scalequery + ") AND (" + spacequery + ")"; + List ret = new ArrayList<>(); + if (!database.hasTable("model")) { + return ret; } + for (long oid : database.queryIds("SELECT oid FROM model;")) { + ret.add(retrieveModel(oid, monitor)); + } + return ret; + } + + public ModelReference retrieve(String query, Channel monitor) { + initialize(monitor); + + final ModelReference ret = new ModelReference(); + + database.query( + query, + new SQL.SimpleResultHandler() { + @Override + public void onRow(ResultSet rs) { + + try { + + SpatialResultSet srs = rs.unwrap(SpatialResultSet.class); + + long tyid = srs.getLong(7); + + ret.setName(srs.getString(4)); + + Concept mtype = getType(tyid).asConcept(); + + ret.setObservableConcept(mtype); + ret.setObservable(getTypeDefinition(tyid)); + + ret.setServerId(nullify(srs.getString(2))); + // ret.setId(srs.getString(3)); + + ret.setNamespaceId(srs.getString(5)); + ret.setProjectId(nullify(srs.getString(6))); + + ret.setScope(KlabStatement.Scope.valueOf(srs.getString(9))); + ret.setResolved(srs.getBoolean(10)); + ret.setReification(srs.getBoolean(11)); + ret.setInScenario(srs.getBoolean(12)); + ret.setHasDirectObjects(srs.getBoolean(13)); + ret.setHasDirectData(srs.getBoolean(14)); + ret.setTimeStart(srs.getLong(15)); + ret.setTimeEnd(srs.getLong(16)); + ret.setSpatial(srs.getBoolean(17)); + ret.setTemporal(srs.getBoolean(18)); + ret.setTimeMultiplicity(srs.getLong(19)); + ret.setSpaceMultiplicity(srs.getLong(20)); + ret.setScaleMultiplicity(srs.getLong(21)); + ret.setDereifyingAttribute(nullify(srs.getString(22))); + ret.setMinSpatialScaleFactor(srs.getInt(23)); + ret.setMaxSpatialScaleFactor(srs.getInt(24)); + ret.setMinTimeScaleFactor(srs.getInt(25)); + ret.setMaxTimeScaleFactor(srs.getInt(26)); + Geometry geometry = srs.getGeometry(27); + if (!geometry.isEmpty()) { + ret.setShape(Shape.create(geometry.toText(), Projection.getLatLon())); // + + } + } catch (SQLException e) { + throw new KlabStorageException(e); + } + } + }); - /* - * Entirely TODO. 
For initialization we should use time only to select for most current info - - * either closer to the context or to today if time is null. For dynamic models we should either - * not have a context or cover the context. Guess this is the job of the prioritizer, and we - * should simply let anything through except when we look for T1(n>1) models. - * - * TODO must match geometry when forced - if it has @intensive(space, time) it shouldn't match - * no space/time OR non-distributed space/time. ALSO the dimensionality! - */ - private String timeQuery(Time time) { + return ret; + } - time = resolveEnumeratedExtensions(time); + public ModelReference retrieveModel(long oid, Channel monitor) throws KlabException { - if (time /* still */ instanceof EnumeratedExtension) { - // TODO - throw new KlabUnimplementedException("enumerated extension"); - } + ModelReference ret = retrieve("SELECT * FROM model WHERE oid = " + oid, monitor); + ret.setMetadata(getMetadataFor(oid)); + return ret; + // + // initialize(monitor); + // + // final ModelReference ret = new ModelReference(); + // + // database.query("SELECT * FROM model WHERE oid = " + oid, new + // SQL.SimpleResultHandler() { + // @Override + // public void onRow(ResultSet rs) { + // + // try { + // + // SpatialResultSet srs = rs.unwrap(SpatialResultSet.class); + // + // long tyid = srs.getLong(7); + // + // ret.setName(srs.getString(4)); + // + // IConcept mtype = getType(tyid); + // + // ret.setObservableConcept(mtype); + // ret.setObservable(getTypeDefinition(tyid)); + // + // ret.setServerId(nullify(srs.getString(2))); + // ret.setId(srs.getString(3)); + // + // ret.setNamespaceId(srs.getString(5)); + // ret.setProjectId(nullify(srs.getString(6))); + // + // ret.setPrivateModel(srs.getBoolean(9)); + // ret.setResolved(srs.getBoolean(10)); + // ret.setReification(srs.getBoolean(11)); + // ret.setInScenario(srs.getBoolean(12)); + // ret.setHasDirectObjects(srs.getBoolean(13)); + // ret.setHasDirectData(srs.getBoolean(14)); + // ret.setTimeStart(srs.getLong(15)); + // ret.setTimeEnd(srs.getLong(16)); + // ret.setSpatial(srs.getBoolean(17)); + // ret.setTemporal(srs.getBoolean(18)); + // ret.setTimeMultiplicity(srs.getLong(19)); + // ret.setSpaceMultiplicity(srs.getLong(20)); + // ret.setScaleMultiplicity(srs.getLong(21)); + // ret.setDereifyingAttribute(nullify(srs.getString(22))); + // ret.setMinSpatialScaleFactor(srs.getInt(23)); + // ret.setMaxSpatialScaleFactor(srs.getInt(24)); + // ret.setMinTimeScaleFactor(srs.getInt(25)); + // ret.setMaxTimeScaleFactor(srs.getInt(26)); + // Geometry geometry = srs.getGeometry(27); + // if (!geometry.isEmpty()) { + // ret.setShape(Shape.create(geometry, Projection.getLatLon())); // + + // } + // } catch (SQLException e) { + // throw new KlabStorageException(e); + // } + // } + // + // }); + // + // ret.setMetadata(getMetadataFor(oid)); + // + // return ret; + } + + @Override + protected String getMainTableId() { + return "model"; + } + + /** + * @param name + * @return true if model with given id exists in database + */ + public boolean hasModel(String name) { + + if (!database.hasTable("model")) { + return false; + } - String ret = ""; - boolean checkBoundaries = false; - if (time != null && checkBoundaries) { - ret = "(timestart == -1 AND timeend == -1) OR ("; - long start = time.getStart() == null ? -1 : time.getStart().getMilliseconds(); - long end = time.getEnd() == null ? 
-1 : time.getEnd().getMilliseconds(); - if (start > 0 && end > 0) { - ret += "timestart >= " + start + " AND timeend <= " + end; - } else if (start > 0) { - ret += "timestart >= " + start; - } else if (end > 0) { - ret += "timeend <= " + end; - } - ret += ")"; - } - return ret; + return database.queryIds("SELECT oid FROM model WHERE name = '" + name + "';").size() > 0; + } + + @Override + protected int deleteAllObjectsWithNamespace(String namespaceId, Channel monitor) { + initialize(monitor); + int n = 0; + for (long oid : + database.queryIds( + "SELECT oid FROM model where namespaceid = '" + + Utils.Escape.forSQL(namespaceId) + + "';")) { + deleteObjectWithId(oid, monitor); + n++; } + return n; + } - public List retrieveAll(Channel monitor) throws KlabException { + @Override + protected void deleteObjectWithId(long id, Channel monitor) { + initialize(monitor); + database.execute("DELETE FROM model WHERE oid = " + id); + deleteMetadataFor(id); + } - initialize(monitor); + @Override + public long store(Object o, Scope monitor) { - List ret = new ArrayList<>(); - if (!database.hasTable("model")) { - return ret; - } - for (long oid : database.queryIds("SELECT oid FROM model;")) { - ret.add(retrieveModel(oid, monitor)); - } - return ret; - } + initialize(monitor); - public ModelReference retrieve(String query, Channel monitor) { - initialize(monitor); + // if (o instanceof KimNamespace && ((KimNamespace) o).isInternal()) { + // return 0; + // } - final ModelReference ret = new ModelReference(); + ArrayList toStore = new ArrayList<>(); - database.query(query, new SQL.SimpleResultHandler() { - @Override - public void onRow(ResultSet rs) { - - try { - - SpatialResultSet srs = rs.unwrap(SpatialResultSet.class); - - long tyid = srs.getLong(7); - - ret.setName(srs.getString(4)); - - Concept mtype = getType(tyid).asConcept(); - - ret.setObservableConcept(mtype); - ret.setObservable(getTypeDefinition(tyid)); - - ret.setServerId(nullify(srs.getString(2))); - // ret.setId(srs.getString(3)); - - ret.setNamespaceId(srs.getString(5)); - ret.setProjectId(nullify(srs.getString(6))); - - ret.setScope(KlabStatement.Scope.valueOf(srs.getString(9))); - ret.setResolved(srs.getBoolean(10)); - ret.setReification(srs.getBoolean(11)); - ret.setInScenario(srs.getBoolean(12)); - ret.setHasDirectObjects(srs.getBoolean(13)); - ret.setHasDirectData(srs.getBoolean(14)); - ret.setTimeStart(srs.getLong(15)); - ret.setTimeEnd(srs.getLong(16)); - ret.setSpatial(srs.getBoolean(17)); - ret.setTemporal(srs.getBoolean(18)); - ret.setTimeMultiplicity(srs.getLong(19)); - ret.setSpaceMultiplicity(srs.getLong(20)); - ret.setScaleMultiplicity(srs.getLong(21)); - ret.setDereifyingAttribute(nullify(srs.getString(22))); - ret.setMinSpatialScaleFactor(srs.getInt(23)); - ret.setMaxSpatialScaleFactor(srs.getInt(24)); - ret.setMinTimeScaleFactor(srs.getInt(25)); - ret.setMaxTimeScaleFactor(srs.getInt(26)); - Geometry geometry = srs.getGeometry(27); - if (!geometry.isEmpty()) { - ret.setShape(Shape.create(geometry.toText(), Projection.getLatLon())); // + - } - } catch (SQLException e) { - throw new KlabStorageException(e); - } - } + if (o instanceof KimModel) { - }); + resourceService.serviceScope().debug("storing model " + ((KimModel) o).getUrn()); - return ret; - } + for (ModelReference data : inferModels((KimModel) o, monitor)) { + toStore.add(data); + } - public ModelReference retrieveModel(long oid, Channel monitor) throws KlabException { - - ModelReference ret = retrieve("SELECT * FROM model WHERE oid = " + oid, monitor); - 
ret.setMetadata(getMetadataFor(oid)); - return ret; - // - // initialize(monitor); - // - // final ModelReference ret = new ModelReference(); - // - // database.query("SELECT * FROM model WHERE oid = " + oid, new - // SQL.SimpleResultHandler() { - // @Override - // public void onRow(ResultSet rs) { - // - // try { - // - // SpatialResultSet srs = rs.unwrap(SpatialResultSet.class); - // - // long tyid = srs.getLong(7); - // - // ret.setName(srs.getString(4)); - // - // IConcept mtype = getType(tyid); - // - // ret.setObservableConcept(mtype); - // ret.setObservable(getTypeDefinition(tyid)); - // - // ret.setServerId(nullify(srs.getString(2))); - // ret.setId(srs.getString(3)); - // - // ret.setNamespaceId(srs.getString(5)); - // ret.setProjectId(nullify(srs.getString(6))); - // - // ret.setPrivateModel(srs.getBoolean(9)); - // ret.setResolved(srs.getBoolean(10)); - // ret.setReification(srs.getBoolean(11)); - // ret.setInScenario(srs.getBoolean(12)); - // ret.setHasDirectObjects(srs.getBoolean(13)); - // ret.setHasDirectData(srs.getBoolean(14)); - // ret.setTimeStart(srs.getLong(15)); - // ret.setTimeEnd(srs.getLong(16)); - // ret.setSpatial(srs.getBoolean(17)); - // ret.setTemporal(srs.getBoolean(18)); - // ret.setTimeMultiplicity(srs.getLong(19)); - // ret.setSpaceMultiplicity(srs.getLong(20)); - // ret.setScaleMultiplicity(srs.getLong(21)); - // ret.setDereifyingAttribute(nullify(srs.getString(22))); - // ret.setMinSpatialScaleFactor(srs.getInt(23)); - // ret.setMaxSpatialScaleFactor(srs.getInt(24)); - // ret.setMinTimeScaleFactor(srs.getInt(25)); - // ret.setMaxTimeScaleFactor(srs.getInt(26)); - // Geometry geometry = srs.getGeometry(27); - // if (!geometry.isEmpty()) { - // ret.setShape(Shape.create(geometry, Projection.getLatLon())); // + - // } - // } catch (SQLException e) { - // throw new KlabStorageException(e); - // } - // } - // - // }); - // - // ret.setMetadata(getMetadataFor(oid)); - // - // return ret; + } else { + toStore.add(o); } - @Override - protected String getMainTableId() { - return "model"; + long ret = -1; + for (Object obj : toStore) { + long r = super.store(obj, monitor); + if (ret < 0) ret = r; } - /** - * @param name - * @return true if model with given id exists in database - */ - public boolean hasModel(String name) { + return ret; + } - if (!database.hasTable("model")) { - return false; - } + public static final String DUMMY_NAMESPACE_ID = "DUMMY_SEARCH_NS"; - return database.queryIds("SELECT oid FROM model WHERE name = '" + name + "';").size() > 0; - } + /** + * Return a collection of model beans that contains all the models implied by a model statement + * (and the model itself, when appropriate). 
+ * + * @param model + * @param monitor + * @return the models implied by the statement + */ + public Collection inferModels(KimModel model, Scope monitor) { - @Override - protected int deleteAllObjectsWithNamespace(String namespaceId, Channel monitor) { - initialize(monitor); - int n = 0; - for (long oid : database - .queryIds("SELECT oid FROM model where namespaceid = '" + Utils.Escape.forSQL( - namespaceId) + "';")) { - deleteObjectWithId(oid, monitor); - n++; - } - return n; - } + List ret = new ArrayList<>(); - @Override - protected void deleteObjectWithId(long id, Channel monitor) { - initialize(monitor); - database.execute("DELETE FROM model WHERE oid = " + id); - deleteMetadataFor(id); - } + boolean isInstantiator = + !model.getObservables().isEmpty() + && model.getObservables().getFirst().getSemantics().isCollective(); - @Override - public long store(Object o, Scope monitor) { + // happens in error + if (model.getObservables().size() == 0 || model.getObservables().get(0) == null) { + return ret; + } - initialize(monitor); + Observable mainObservable = + scope.getService(Reasoner.class).declareObservable(model.getObservables().get(0)); - // if (o instanceof KimNamespace && ((KimNamespace) o).isInternal()) { - // return 0; - // } + ret.addAll(getModelDescriptors(model, monitor)); - ArrayList toStore = new ArrayList<>(); + if (!ret.isEmpty()) { - if (o instanceof KimModel) { + for (KimObservable attr : + model.getObservables().stream().filter(o -> o.getFormalName() != null).toList()) { - resourceService.serviceScope().debug("storing model " + ((KimModel) o).getUrn()); + Observable observable = scope.getService(Reasoner.class).declareObservable(attr); - for (ModelReference data : inferModels((KimModel) o, monitor)) { - toStore.add(data); - } - - } else { - toStore.add(o); + if (attr == null) { + // only in error + continue; } - long ret = -1; - for (Object obj : toStore) { - long r = super.store(obj, monitor); - if (ret < 0) - ret = r; + /* + * attribute type must have inherent type added if it's an instantiated quality + * (from an instantiator or as a secondary observable of a resolver with explicit, + * specialized inherency) + */ + Concept type = observable.getSemantics(); + if (isInstantiator) { + Concept context = scope.getService(Reasoner.class).inherent(type); + if (context == null + || !scope.getService(Reasoner.class).is(context, mainObservable.getSemantics())) { + type = observable.builder(monitor).of(mainObservable.getSemantics()).buildConcept(); + } } - - return ret; + ModelReference m = ret.get(0).copy(); + m.setObservable(type.getUrn()); + m.setObservableConcept(type); + m.setObservationType(observable.getDescriptionType().name()); + m.setDereifyingAttribute(attr.getFormalName()); + m.setMediation(Mediation.DEREIFY_QUALITY); + m.setPrimaryObservable(!isInstantiator); + ret.add(m); + } + + if (isInstantiator) { + // TODO add presence model for main observable type and + // dereifying models for all mandatory attributes of observable in context + } } - public static final String DUMMY_NAMESPACE_ID = "DUMMY_SEARCH_NS"; + return ret; + } - /** - * Return a collection of model beans that contains all the models implied by a model statement (and the - * model itself, when appropriate). 
- * - * @param model - * @param monitor - * @return the models implied by the statement - */ - public Collection inferModels(KimModel model, Scope monitor) { + private Collection getModelDescriptors(KimModel model, Scope monitor) { - List ret = new ArrayList<>(); + List ret = new ArrayList<>(); + Scale scale = Scale.create(resourceService.modelGeometry(model.getUrn())); - boolean isInstantiator = - !model.getObservables().isEmpty() && model.getObservables().getFirst().getSemantics().isCollective(); + Shape spaceExtent = null; + Time timeExtent = null; + long spaceMultiplicity = -1; + long timeMultiplicity = -1; + long scaleMultiplicity = 1; + long timeStart = -1; + long timeEnd = -1; + boolean isSpatial = false; + boolean isTemporal = false; + String enumeratedSpaceDomain = null; + String enumeratedSpaceLocation = null; + Project project = resourceService.resolveProject(model.getProjectName(), scope); + KimNamespace namespace = resourceService.resolveNamespace(model.getNamespace(), scope); - // happens in error - if (model.getObservables().size() == 0 || model.getObservables().get(0) == null) { - return ret; - } + if (scale != null) { - Observable mainObservable = - scope.getService(Reasoner.class).declareObservable(model.getObservables().get(0)); - - ret.addAll(getModelDescriptors(model, monitor)); - - if (!ret.isEmpty()) { - - for (KimObservable attr : - model.getObservables().stream().filter(o -> o.getFormalName() != null).toList()) { - - Observable observable = scope.getService(Reasoner.class).declareObservable(attr); - - if (attr == null) { - // only in error - continue; - } - - /* - * attribute type must have inherent type added if it's an instantiated quality - * (from an instantiator or as a secondary observable of a resolver with explicit, - * specialized inherency) - */ - Concept type = observable.getSemantics(); - if (isInstantiator) { - Concept context = scope.getService(Reasoner.class).inherent(type); - if (context == null || !scope.getService(Reasoner.class).is( - context, - mainObservable.getSemantics())) { - type = observable.builder(monitor).of(mainObservable.getSemantics()).buildConcept(); - } - } - ModelReference m = ret.get(0).copy(); - m.setObservable(type.getUrn()); - m.setObservableConcept(type); - m.setObservationType(observable.getDescriptionType().name()); - m.setDereifyingAttribute(attr.getFormalName()); - m.setMediation(Mediation.DEREIFY_QUALITY); - m.setPrimaryObservable(!isInstantiator); - ret.add(m); - } + scaleMultiplicity = scale.size(); - if (isInstantiator) { - // TODO add presence model for main observable type and - // dereifying models for all mandatory attributes of observable in context - } - } + /* + * If the runtime allows, resolve any enumeration to physical extents + */ + Space space = resolveEnumeratedExtensions(scale.getSpace()); + Time time = resolveEnumeratedExtensions(scale.getTime()); - return ret; - } + if (space /* still */ instanceof EnumeratedExtension) { + /* + * TODO handle the enumerated extension + */ + throw new KlabUnimplementedException("enumerated extension"); + // Pair defs = ((EnumeratedExtension) + // scale.getSpace()).getExtension(); + // enumeratedSpaceDomain = defs.getFirst(); + // enumeratedSpaceLocation = defs.getSecond(); + } else if (space != null) { + spaceExtent = space.getGeometricShape(); + // may be null when we just say 'over space'. 
+ if (spaceExtent != null) { + spaceExtent = spaceExtent.transform(Projection.getLatLon()); + spaceMultiplicity = space.size(); + } + isSpatial = true; + } - private Collection getModelDescriptors(KimModel model, Scope monitor) { - - List ret = new ArrayList<>(); - Scale scale = Scale.create(resourceService.modelGeometry(model.getUrn())); - - Shape spaceExtent = null; - Time timeExtent = null; - long spaceMultiplicity = -1; - long timeMultiplicity = -1; - long scaleMultiplicity = 1; - long timeStart = -1; - long timeEnd = -1; - boolean isSpatial = false; - boolean isTemporal = false; - String enumeratedSpaceDomain = null; - String enumeratedSpaceLocation = null; - Project project = resourceService.resolveProject(model.getProjectName(), scope); - KimNamespace namespace = resourceService.resolveNamespace(model.getNamespace(), scope); - - if (scale != null) { - - scaleMultiplicity = scale.size(); - - /* - * If the runtime allows, resolve any enumeration to physical extents - */ - Space space = resolveEnumeratedExtensions(scale.getSpace()); - Time time = resolveEnumeratedExtensions(scale.getTime()); - - if (space /* still */ instanceof EnumeratedExtension) { - /* - * TODO handle the enumerated extension - */ - throw new KlabUnimplementedException("enumerated extension"); - // Pair defs = ((EnumeratedExtension) - // scale.getSpace()).getExtension(); - // enumeratedSpaceDomain = defs.getFirst(); - // enumeratedSpaceLocation = defs.getSecond(); - } else if (space != null) { - spaceExtent = space.getGeometricShape(); - // may be null when we just say 'over space'. - if (spaceExtent != null) { - spaceExtent = spaceExtent.transform(Projection.getLatLon()); - spaceMultiplicity = space.size(); - } - isSpatial = true; + if (time != null) { + if (time /* still */ instanceof EnumeratedExtension) { + // TODO + throw new KlabUnimplementedException("enumerated extension"); + } else { + timeExtent = time.collapsed(); + if (timeExtent != null) { + if (timeExtent.getStart() != null) { + timeStart = timeExtent.getStart().getMilliseconds(); } - - if (time != null) { - if (time /* still */ instanceof EnumeratedExtension) { - // TODO - throw new KlabUnimplementedException("enumerated extension"); - } else { - timeExtent = time.collapsed(); - if (timeExtent != null) { - if (timeExtent.getStart() != null) { - timeStart = timeExtent.getStart().getMilliseconds(); - } - if (timeExtent.getEnd() != null) { - timeEnd = timeExtent.getEnd().getMilliseconds(); - } - } - } - timeMultiplicity = time.size(); - isTemporal = true; + if (timeExtent.getEnd() != null) { + timeEnd = timeExtent.getEnd().getMilliseconds(); } + } } + timeMultiplicity = time.size(); + isTemporal = true; + } + } - boolean first = true; - Observable main = null; - for (KimObservable kobs : model.getObservables()) { + boolean first = true; + Observable main = null; + for (KimObservable kobs : model.getObservables()) { - Observable oobs = scope.getService(Reasoner.class).declareObservable(kobs); + Observable oobs = scope.getService(Reasoner.class).declareObservable(kobs); - if (first) { - main = oobs; - } + if (first) { + main = oobs; + } - boolean isInstantiator = - !model.getObservables().isEmpty() && model.getObservables().getFirst().getSemantics().isCollective(); - - for (Observable obs : unpackObservables(oobs, main, first, monitor)) { - - ModelReference m = new ModelReference(); - - m.setName(model.getUrn()); - m.setNamespaceId(model.getNamespace()); - // if (model.getNamespace().getProject() != null) { - m.setProjectId(model.getProjectName()); - // if 
(model.getNamespace().getProject().isRemote()) { - // m.setServerId(model.getNamespace().getProject().getOriginatingNodeId()); - // } - // } - - if (project != null) { - m.setPermissions(project.getManifest().getPrivileges()); - } - - m.setTimeEnd(timeEnd); - m.setTimeStart(timeStart); - m.setTimeMultiplicity(timeMultiplicity); - m.setSpaceMultiplicity(spaceMultiplicity); - m.setScaleMultiplicity(scaleMultiplicity); - m.setSpatial(isSpatial); - m.setTemporal(isTemporal); - m.setShape(spaceExtent); - m.setEnumeratedSpaceDomain(enumeratedSpaceDomain); - m.setEnumeratedSpaceLocation(enumeratedSpaceLocation); - - m.setObservable(obs.getUrn()); - m.setObservationType(obs.getDescriptionType().name()); - m.setObservableConcept(obs.getSemantics()); - // m.setObservationConcept(obs.getObservationType()); - - m.setScope(model.getScope()); - m.setInScenario(namespace.isScenario()); - m.setReification(isInstantiator); - m.setResolved(model.getDependencies().isEmpty()); - m.setHasDirectData(m.isResolved() && model.getObservables().getFirst().getSemantics().is( - SemanticType.QUALITY)); - m.setHasDirectObjects( - m.isResolved() && model.getObservables().get(0).getSemantics().is( - SemanticType.DIRECT_OBSERVABLE)); - - m.setMinSpatialScaleFactor(model.getMetadata().get( - Metadata.IM_MIN_SPATIAL_SCALE, - Space.MIN_SCALE_RANK)); - m.setMaxSpatialScaleFactor(model.getMetadata().get( - Metadata.IM_MAX_SPATIAL_SCALE, - Space.MAX_SCALE_RANK)); - m.setMinTimeScaleFactor(model.getMetadata().get( - Metadata.IM_MIN_TEMPORAL_SCALE, - Time.MIN_SCALE_RANK)); - m.setMaxTimeScaleFactor(model.getMetadata().get( - Metadata.IM_MAX_TEMPORAL_SCALE, - Time.MAX_SCALE_RANK)); - - m.setPrimaryObservable(first); - - // if (first && obs.isSpecialized()) { - // m.setSpecializedObservable(true); - // } - - first = false; - - m.setMetadata(translateMetadata(model.getMetadata())); - - ret.add(m); + boolean isInstantiator = + !model.getObservables().isEmpty() + && model.getObservables().getFirst().getSemantics().isCollective(); - } + for (Observable obs : unpackObservables(oobs, main, first, monitor)) { - /* - * For now just disable additional observables in instantiators and use their attribute - * observers upstream. We may do different things here: - * - * 0. keep ignoring them 1. keep them all, contextualized to the instantiated - * observable; 2. 
keep only the non-statically contextualized ones (w/o the value) - * - */ - if (isInstantiator) { - break; - } + ModelReference m = new ModelReference(); - } - return ret; - } + m.setName(model.getUrn()); + m.setNamespaceId(model.getNamespace()); + // if (model.getNamespace().getProject() != null) { + m.setProjectId(model.getProjectName()); + // if (model.getNamespace().getProject().isRemote()) { + // m.setServerId(model.getNamespace().getProject().getOriginatingNodeId()); + // } + // } - @SuppressWarnings("unchecked") - private > T resolveEnumeratedExtensions(T extent) { - if (extent instanceof EnumeratedExtension) { - return (T) ((EnumeratedExtension) extent).getPhysicalExtent(); + if (project != null) { + m.setPermissions(project.getManifest().getPrivileges()); } - return extent; - } - private List unpackObservables(Observable oobs, Observable main, boolean first, - Scope monitor) { - - List ret = new ArrayList<>(); - if (!first) { - /** - * Subsequent observables inherit any explicit specialization in the main observable of - * a model - */ - Concept specialized = scope.getService(Reasoner.class).directInherent(main.getSemantics()); - Concept oobsContext = scope.getService(Reasoner.class).inherent(oobs); - if (specialized != null - && (oobsContext == null || !scope.getService(Reasoner.class).is( - oobsContext, - specialized))) { - oobs = oobs.builder(monitor).of(specialized).build(); - } - } - ret.add(oobs); - return ret; - } + m.setTimeEnd(timeEnd); + m.setTimeStart(timeStart); + m.setTimeMultiplicity(timeMultiplicity); + m.setSpaceMultiplicity(spaceMultiplicity); + m.setScaleMultiplicity(scaleMultiplicity); + m.setSpatial(isSpatial); + m.setTemporal(isTemporal); + m.setShape(spaceExtent); + m.setEnumeratedSpaceDomain(enumeratedSpaceDomain); + m.setEnumeratedSpaceLocation(enumeratedSpaceLocation); + + m.setObservable(obs.getUrn()); + m.setObservationType(obs.getDescriptionType().name()); + m.setObservableConcept(obs.getSemantics()); + // m.setObservationConcept(obs.getObservationType()); + + m.setScope(model.getScope()); + m.setInScenario(namespace.isScenario()); + m.setReification(isInstantiator); + m.setResolved(model.getDependencies().isEmpty()); + m.setHasDirectData( + m.isResolved() + && model.getObservables().getFirst().getSemantics().is(SemanticType.QUALITY)); + m.setHasDirectObjects( + m.isResolved() + && model.getObservables().get(0).getSemantics().is(SemanticType.DIRECT_OBSERVABLE)); + + m.setMinSpatialScaleFactor( + model.getMetadata().get(Metadata.IM_MIN_SPATIAL_SCALE, Space.MIN_SCALE_RANK)); + m.setMaxSpatialScaleFactor( + model.getMetadata().get(Metadata.IM_MAX_SPATIAL_SCALE, Space.MAX_SCALE_RANK)); + m.setMinTimeScaleFactor( + model.getMetadata().get(Metadata.IM_MIN_TEMPORAL_SCALE, Time.MIN_SCALE_RANK)); + m.setMaxTimeScaleFactor( + model.getMetadata().get(Metadata.IM_MAX_TEMPORAL_SCALE, Time.MAX_SCALE_RANK)); + + m.setPrimaryObservable(first); + + // if (first && obs.isSpecialized()) { + // m.setSpecializedObservable(true); + // } - private static Map translateMetadata(Metadata metadata) { - Map ret = new HashMap<>(); - for (String key : metadata.keySet()) { - ret.put(key, metadata.get(key) == null ? "null" : metadata.get(key).toString()); - } - return ret; + first = false; + + m.setMetadata(translateMetadata(model.getMetadata())); + + ret.add(m); + } + + /* + * For now just disable additional observables in instantiators and use their attribute + * observers upstream. We may do different things here: + * + * 0. keep ignoring them 1. 
keep them all, contextualized to the instantiated + * observable; 2. keep only the non-statically contextualized ones (w/o the value) + * + */ + if (isInstantiator) { + break; + } } + return ret; + } - public ModelReference retrieveModel(String string, Channel monitor) { - return retrieve("SELECT * FROM model WHERE name = '" + string + "'", monitor); + @SuppressWarnings("unchecked") + private > T resolveEnumeratedExtensions(T extent) { + if (extent instanceof EnumeratedExtension) { + return (T) ((EnumeratedExtension) extent).getPhysicalExtent(); + } + return extent; + } + + private List unpackObservables( + Observable oobs, Observable main, boolean first, Scope monitor) { + + List ret = new ArrayList<>(); + if (!first) { + /** + * Subsequent observables inherit any explicit specialization in the main observable of a + * model + */ + Concept specialized = scope.getService(Reasoner.class).directInherent(main.getSemantics()); + Concept oobsContext = scope.getService(Reasoner.class).inherent(oobs); + if (specialized != null + && (oobsContext == null + || !scope.getService(Reasoner.class).is(oobsContext, specialized))) { + oobs = oobs.builder(monitor).of(specialized).build(); + } + } + ret.add(oobs); + return ret; + } + + private static Map translateMetadata(Metadata metadata) { + Map ret = new HashMap<>(); + for (String key : metadata.keySet()) { + ret.put(key, metadata.get(key) == null ? "null" : metadata.get(key).toString()); } + return ret; + } + public ModelReference retrieveModel(String string, Channel monitor) { + return retrieve("SELECT * FROM model WHERE name = '" + string + "'", monitor); + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelReference.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelReference.java index bbe64c8ba..57344cbe3 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelReference.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ModelReference.java @@ -112,7 +112,7 @@ public static enum Mediation { private int maxTimeScaleFactor = Time.MAX_SCALE_RANK; private int priority = 0; private boolean specializedObservable = false; - private ResourcePrivileges permissions = ResourcePrivileges.PUBLIC; // TODO these must come from the service + private ResourcePrivileges permissions = ResourcePrivileges.PUBLIC; private Version version; transient private Concept observableConcept; diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ObservableKbox.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ObservableKbox.java index a502220a3..b343a617c 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ObservableKbox.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ObservableKbox.java @@ -1,20 +1,20 @@ /******************************************************************************* * Copyright (C) 2007, 2015: - * + * * - Ferdinando Villa - integratedmodelling.org - any other * authors listed in @author annotations * * All rights reserved. This file is part of the k.LAB software suite, meant to enable modular, * collaborative, integrated development of interoperable data and model components. 
For details, * see http://integratedmodelling.org. - * + * * This program is free software; you can redistribute it and/or modify it under the terms of the * Affero General Public License Version 3 or any later version. * * This program is distributed in the hope that it will be useful, but without any warranty; without * even the implied warranty of merchantability or fitness for a particular purpose. See the Affero * General Public License for more details. - * + * * You should have received a copy of the Affero General Public License along with this program; if * not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA * 02111-1307, USA. The license is also available at: https://www.gnu.org/licenses/agpl.html @@ -44,598 +44,650 @@ import java.util.*; /** - * Design principles: - *
- * * This is a hybrid kbox that depends on the reasoner knowing all concepts in it. For this reason, - * it holds an index of the definitions of any compound observable concept ever stored in it, and - * assigns an integer ID to it. The table is read on startup so that all concepts are known to the - * reasoner, and any that are not will not be retrievable. - *
- * * The main table is CONCEPTS, containing simply the ID of the type and its definition. A query + * Design principles: * This is a hybrid kbox that depends on the reasoner knowing all concepts in + * it. For this reason, it holds an index of the definitions of any compound observable concept ever + * stored in it, and assigns an integer ID to it. The table is read on startup so that all concepts + * are known to the reasoner, and any that are not will not be retrievable. + * + *
* The main table is CONCEPTS, containing simply the ID of the type and its definition. A query * for any instance stored by derived kboxes with its ID in the set will return compatible - * observables. - *
- * For convenience, the kbox also maintains a METADATA table for POD objects and exposes simple - * methods to store/retrieve/delete metadata beans. - *
- * TODO For now the table does not maintain reference counts, so it is possible that concepts IDs - * are referenced that are no longer represented because the corresponding observations that have - * been deleted. Refcounting is easy to implement but costly at store/delete, so let's see how - * problematic this becomes. - * - * @author ferdinando.villa + * observables. For convenience, the kbox also maintains a METADATA table for POD objects and + * exposes simple methods to store/retrieve/delete metadata beans. TODO For now the table does not + * maintain reference counts, so it is possible that concepts IDs are referenced that are no longer + * represented because the corresponding observations that have been deleted. Refcounting is easy to + * implement but costly at store/delete, so let's see how problematic this becomes. * + * @author ferdinando.villa */ public abstract class ObservableKbox extends H2Kbox { - private Map definitionHash = new HashMap<>(); - private Map typeHash = new HashMap<>(); - private Map> coreTypeHash = new HashMap<>(); - private Map conceptHash = new HashMap<>(); - -// protected Reasoner reasoner; - protected Scope scope; - protected ResourcesService resourceService; - - /** - * The version is used to create storage on the file system. Change this when incompatible - * changes are made to force a rebuild. - */ - public static final String KBOX_VERSION = "0120v0"; - - /* - * exposed to allow preallocating connections in big imports. - */ - public H2Database getDatabase() { - return this.database; - } - - public Observable getType(long id) { - if (typeHash.containsKey(id)) { - // FIXME this looks yucky - nonsemantic must compile just like the others - // if (typeHash.get(id).startsWith("nonsemantic:")) { - // return - // reasoner.declareObservable(resourceService.resolveObservable(typeHash.get(id))); - // } - return scope.getService(Reasoner.class).declareObservable(resourceService.resolveObservable(typeHash.get(id))); - } - return null; - } - - public String getTypeDefinition(long id) { - return typeHash.get(id); + private Map definitionHash = new HashMap<>(); + private Map typeHash = new HashMap<>(); + private Map> coreTypeHash = new HashMap<>(); + private Map conceptHash = new HashMap<>(); + + // protected Reasoner reasoner; + protected Scope scope; + protected ResourcesService resourceService; + + /** + * The version is used to create storage on the file system. Change this when incompatible changes + * are made to force a rebuild. + */ + public static final String KBOX_VERSION = "0120v0"; + + /* + * exposed to allow preallocating connections in big imports. + */ + public H2Database getDatabase() { + return this.database; + } + + public Observable getType(long id) { + if (typeHash.containsKey(id)) { + // FIXME this looks yucky - nonsemantic must compile just like the others + // if (typeHash.get(id).startsWith("nonsemantic:")) { + // return + // reasoner.declareObservable(resourceService.resolveObservable(typeHash.get(id))); + // } + return scope + .getService(Reasoner.class) + .declareObservable(resourceService.resolveObservable(typeHash.get(id))); } + return null; + } - protected String joinStringConditions(String field, Collection stringValues, String operator) { + public String getTypeDefinition(long id) { + return typeHash.get(id); + } - StringBuilder ret = new StringBuilder(); + protected String joinStringConditions(String field, Collection stringValues, String operator) { - for (Object o : stringValues) { - ret.append((ret.length() == 0) ? 
"" : (" " + operator + " ")).append(field).append(" = '").append(o).append("'"); - } + StringBuilder ret = new StringBuilder(); - return ret.toString(); + for (Object o : stringValues) { + ret.append((ret.length() == 0) ? "" : (" " + operator + " ")) + .append(field) + .append(" = '") + .append(o) + .append("'"); } - /** - * Get the ID of the table that contains the "primary" object we provide. Used to check for - * empty database - if this is not there, either nothing needs to be done or initialization - * needs to be performed. - * - * @return - */ - protected abstract String getMainTableId(); + return ret.toString(); + } - /** - * Delete all objects in the passed namespace and return the number of objects deleted. - * - * @param namespaceId - * @return - */ - protected abstract int deleteAllObjectsWithNamespace(String namespaceId, Channel monitor); + /** + * Get the ID of the table that contains the "primary" object we provide. Used to check for empty + * database - if this is not there, either nothing needs to be done or initialization needs to be + * performed. + * + * @return + */ + protected abstract String getMainTableId(); - protected abstract void deleteObjectWithId(long id, Channel monitor); + /** + * Delete all objects in the passed namespace and return the number of objects deleted. + * + * @param namespaceId + * @return + */ + protected abstract int deleteAllObjectsWithNamespace(String namespaceId, Channel monitor); - protected abstract void initialize(Channel monitor); + protected abstract void deleteObjectWithId(long id, Channel monitor); - public int clearNamespace(String namespaceId, Channel monitor) { + protected abstract void initialize(Channel monitor); - initialize(monitor); - - if (!database.hasTable(getMainTableId())) { - return 0; - } + public int clearNamespace(String namespaceId, Channel monitor) { - int n = deleteAllObjectsWithNamespace(namespaceId, monitor); + initialize(monitor); - database.execute("DELETE FROM namespaces where id = '" + namespaceId + "';"); - - return n; + if (!database.hasTable(getMainTableId())) { + return 0; } - /** - * Count the objects in the main table. - * - * @return number of observations - */ - public long count() { + int n = deleteAllObjectsWithNamespace(namespaceId, monitor); - if (!database.hasTable(getMainTableId())) { - return 0; - } - List ret = database.queryIds("SELECT COUNT(*) from " + getMainTableId() + ";"); - return !ret.isEmpty() ? ret.getFirst() : 0L; - } + database.execute("DELETE FROM namespaces where id = '" + namespaceId + "';"); - class ObservableSchema implements Schema { + return n; + } - @Override - public String getCreateSQL() { - return "CREATE TABLE concepts (" + "oid LONG PRIMARY KEY, " + "definition VARCHAR(1024), " + "refcount LONG" + "); " - + "CREATE TABLE metadata (" + "fid LONG, " + "mkey VARCHAR(256), " + "mvalue OTHER" + ");" - + "CREATE INDEX concepts_oid_index ON concepts(oid); " - + "CREATE INDEX concepts_definition_index ON concepts(definition); " - + "CREATE INDEX metadata_oid_index ON metadata(fid); "; - } + /** + * Count the objects in the main table. + * + * @return number of observations + */ + public long count() { - @Override - public String getTableName() { - return "concepts"; - } + if (!database.hasTable(getMainTableId())) { + return 0; } - - static public class NamespaceSchema implements Schema { - - @Override - public String getCreateSQL() { - // TODO FIXME primary key on NS URN is giving errors although there seems to be - // no violation. Must investigate. 
- // return "CREATE TABLE namespaces (" + "id VARCHAR(256) PRIMARY KEY, " + "timestamp - // LONG, " - // + "isscenario BOOLEAN" + "); " + "CREATE INDEX namespace_id_index ON namespaces(id); - // "; - return "CREATE TABLE namespaces (" + "id VARCHAR(256) PRIMARY KEY, " + "timestamp LONG, " + "isscenario BOOLEAN" - + "); " + "CREATE INDEX namespace_id_index ON namespaces(id); "; - } - - @Override - public String getTableName() { - return "namespaces"; - } + List ret = database.queryIds("SELECT COUNT(*) from " + getMainTableId() + ";"); + return !ret.isEmpty() ? ret.getFirst() : 0L; + } + + class ObservableSchema implements Schema { + + @Override + public String getCreateSQL() { + return "CREATE TABLE concepts (" + + "oid LONG PRIMARY KEY, " + + "definition VARCHAR(1024), " + + "refcount LONG" + + "); " + + "CREATE TABLE metadata (" + + "fid LONG, " + + "mkey VARCHAR(256), " + + "mvalue OTHER" + + ");" + + "CREATE INDEX concepts_oid_index ON concepts(oid); " + + "CREATE INDEX concepts_definition_index ON concepts(definition); " + + "CREATE INDEX metadata_oid_index ON metadata(fid); "; } - static class NamespaceSerializer implements Serializer { - - @Override - public String serialize(KimNamespace ns, long primaryKey, long foreignKey) { - - String ret = null; - if (ns != null) { - ret = "DELETE FROM namespaces WHERE id = '" + ns.getUrn() + "'; INSERT INTO namespaces VALUES ('" - + Utils.Escape.forSQL(ns.getUrn()) + "', " + ns.getLastUpdateTimestamp() + ", " - + (ns.isScenario() ? "TRUE" : "FALSE") + ");"; - } - return ret; - } + @Override + public String getTableName() { + return "concepts"; } - - /** - * Get the ID correspondent to the passed concept, and if unavailable return -1. Does not use - * the database so it's very fast. - * - * @param c - * @return the ID for the concept, or -1 if not seen before - */ - public long getConceptId(Concept c) { - Long ret = definitionHash.get(c.getUrn()); - return ret == null ? -1L : ret; + } + + public static class NamespaceSchema implements Schema { + + @Override + public String getCreateSQL() { + // TODO FIXME primary key on NS URN is giving errors although there seems to be + // no violation. Must investigate. + // return "CREATE TABLE namespaces (" + "id VARCHAR(256) PRIMARY KEY, " + "timestamp + // LONG, " + // + "isscenario BOOLEAN" + "); " + "CREATE INDEX namespace_id_index ON namespaces(id); + // "; + return "CREATE TABLE namespaces (" + + "id VARCHAR(256) PRIMARY KEY, " + + "timestamp LONG, " + + "isscenario BOOLEAN" + + "); " + + "CREATE INDEX namespace_id_index ON namespaces(id); "; } - public List getKnownDefinitions() { - List ret = new ArrayList<>(); - ret.addAll(definitionHash.keySet()); - Collections.sort(ret); - return ret; + @Override + public String getTableName() { + return "namespaces"; + } + } + + static class NamespaceSerializer implements Serializer { + + @Override + public String serialize(KimNamespace ns, long primaryKey, long foreignKey) { + + String ret = null; + if (ns != null) { + ret = + "DELETE FROM namespaces WHERE id = '" + + ns.getUrn() + + "'; INSERT INTO namespaces VALUES ('" + + Utils.Escape.forSQL(ns.getUrn()) + + "', " + + ns.getLastUpdateTimestamp() + + ", " + + (ns.isScenario() ? "TRUE" : "FALSE") + + ");"; + } + return ret; + } + } + + /** + * Get the ID correspondent to the passed concept, and if unavailable return -1. Does not use the + * database so it's very fast. 
+ * + * @param c + * @return the ID for the concept, or -1 if not seen before + */ + public long getConceptId(Concept c) { + Long ret = definitionHash.get(c.getUrn()); + return ret == null ? -1L : ret; + } + + public List getKnownDefinitions() { + List ret = new ArrayList<>(); + ret.addAll(definitionHash.keySet()); + Collections.sort(ret); + return ret; + } + + /** + * Check that the passed observable has been inserted, and if not make sure it is represented in + * the database. Return the stable ID to use for storing records that use it. + * + * @param observable + * @param monitor + * @return the ID for the observable, creating as necessary + */ + public long requireConceptId(Concept observable, Channel monitor) { + + long ret = getConceptId(observable); + if (ret >= 0) { + return ret; } - /** - * Check that the passed observable has been inserted, and if not make sure it is represented in - * the database. Return the stable ID to use for storing records that use it. - * - * @param observable - * @param monitor - * @return the ID for the observable, creating as necessary - */ - public long requireConceptId(Concept observable, Channel monitor) { - - long ret = getConceptId(observable); - if (ret >= 0) { - return ret; - } - - try { - final String definition = observable.getUrn(); + try { + final String definition = observable.getUrn(); - ret = database.storeObject(observable, 0, new Serializer(){ + ret = + database.storeObject( + observable, + 0, + new Serializer() { @Override - public String serialize(Concept o, /* Schema schema, */ long primaryKey, long foreignKey) { - return "INSERT INTO concepts VALUES (" + primaryKey + ", '" + definition + "', 1);"; + public String serialize( + Concept o, /* Schema schema, */ long primaryKey, long foreignKey) { + return "INSERT INTO concepts VALUES (" + + primaryKey + + ", '" + + definition + + "', 1);"; } - }, monitor); - - definitionHash.put(definition, ret); - typeHash.put(ret, definition); - conceptHash.put(definition, observable); - - // store all existing definitions with same core type - Concept coreType = scope.getService(Reasoner.class).coreObservable(observable); - String cdef = coreType.getUrn(); - Set cset = coreTypeHash.get(cdef); - if (cset == null) { - cset = new HashSet<>(); - coreTypeHash.put(cdef, cset); - } - cset.add(definition); - conceptHash.put(cdef, coreType); - - } catch (KlabException e) { - throw new KlabStorageException(e); - } + }, + monitor); + + definitionHash.put(definition, ret); + typeHash.put(ret, definition); + conceptHash.put(definition, observable); + + // store all existing definitions with same core type + Concept coreType = scope.getService(Reasoner.class).coreObservable(observable); + String cdef = coreType.getUrn(); + Set cset = coreTypeHash.get(cdef); + if (cset == null) { + cset = new HashSet<>(); + coreTypeHash.put(cdef, cset); + } + cset.add(definition); + conceptHash.put(cdef, coreType); + + } catch (KlabException e) { + throw new KlabStorageException(e); + } - return ret; + return ret; + } + + /** + * Determine all the compatible MODEL concepts for which observables have been stored, and return + * the set of their IDs. + * + *

If the core type is concrete, only that core type is looked up in the observable's parents, + * so that models that observe that type (potentially with other traits not adopted by the + * observable and in any compatible context) are found. If the core type is abstract or was stated + * generic, any child is OK as long as trait, roles, inherency and context are compatible. + * + * @param observable + * @return the IDs of all compatible concepts that have been used in the kbox. + */ + public Set getCompatibleTypeIds(Observable observable, Concept context) { + + Set ret = new HashSet<>(); + Concept main = scope.getService(Reasoner.class).coreObservable(observable); + if (main == null) { + /* + * not a domain concept or abstract; can't have observables. + */ + return ret; } - /** - * Determine all the compatible MODEL concepts for which observables have been stored, and - * return the set of their IDs. - * - * If the core type is concrete, only that core type is looked up in the observable's parents, - * so that models that observe that type (potentially with other traits not adopted by the - * observable and in any compatible context) are found. If the core type is abstract or was - * stated generic, any child is OK as long as trait, roles, inherency and context are - * compatible. - * - * @param observable - * @return the IDs of all compatible concepts that have been used in the kbox. + /* + * We lookup all models whose observable incarnates the core type, adding all possible + * specific models if the observable is abstract or the context requires generic matching + * ('any' dependencies). The initial set of candidates is weeded out of all incompatible or + * unrepresented concepts later. */ - public Set getCompatibleTypeIds(Observable observable, Concept context) { - - Set ret = new HashSet<>(); - Concept main = scope.getService(Reasoner.class).coreObservable(observable); - if (main == null) { - /* - * not a domain concept or abstract; can't have observables. - */ - return ret; - } - - /* - * We lookup all models whose observable incarnates the core type, adding all possible - * specific models if the observable is abstract or the context requires generic matching - * ('any' dependencies). The initial set of candidates is weeded out of all incompatible or - * unrepresented concepts later. - */ - for (Concept candidate : getCandidates(main, observable.getDescriptionType().isInstantiation(), - observable.getSpecializedComponents())) { - - /* - * let an abstract model resolve a concrete observable if the abstract traits are in the - * resolved predicates for the observable. If the observable contains the "specialized" - * flag, we don't compare the inherency, letting through models that are contextualized - * to specialized contexts - */ - - if (scope.getService(Reasoner.class).semanticDistance(candidate, observable, - context)/* + for (Concept candidate : + getCandidates( + main, + observable.getDescriptionType().isInstantiation(), + observable.getSpecializedComponents())) { + + /* + * let an abstract model resolve a concrete observable if the abstract traits are in the + * resolved predicates for the observable. If the observable contains the "specialized" + * flag, we don't compare the inherency, letting through models that are contextualized + * to specialized contexts + */ + + if (scope.getService(Reasoner.class).semanticDistance(candidate, observable, context) /* * TODO handle the resolved predicates? 
* candidate.getSemanticDistance(observable, context, * !observable.isSpecialized(), ((Observable) * observable).getResolvedPredicates()) - */ >= 0) { - // System.out.println(" YES"); - long id = getConceptId(candidate); - if (id >= 0) { - ret.add(id); - } - } /* - * else { System.out.println(" NOPE"); } - */ + */ + >= 0) { + // System.out.println(" YES"); + long id = getConceptId(candidate); + if (id >= 0) { + ret.add(id); } - - return ret; + } /* + * else { System.out.println(" NOPE"); } + */ } - /* - * FIXME use the description type directly - */ - private Set getCandidates(Concept concept, boolean instantiation, Collection> resolvedPredicates) { - - Set ret = new HashSet<>(); - for (Concept main : getAcceptableParents(concept, resolvedPredicates)) { - - Set defs = coreTypeHash.get(main.getUrn()); - if (defs != null) { - for (String def : defs) { - Concept candidate = conceptHash.get(def); - boolean ok = true; - -// if (candidate.is(SemanticType.PREDICATE)) { -// // inherency must align with the resolution mode -// boolean hasDistributedInherency = scope.getService(Reasoner.class).hasDistributedInherency(candidate); -// ok = (hasDistributedInherency && instantiation) || (!hasDistributedInherency && !instantiation); -// } - if (ok) { - ret.add(candidate); - } - } - } + return ret; + } + + /* + * FIXME use the description type directly + */ + private Set getCandidates( + Concept concept, + boolean instantiation, + Collection> resolvedPredicates) { + + Set ret = new HashSet<>(); + for (Concept main : getAcceptableParents(concept, resolvedPredicates)) { + + Set defs = coreTypeHash.get(main.getUrn()); + if (defs != null) { + for (String def : defs) { + Concept candidate = conceptHash.get(def); + boolean ok = true; + + // if (candidate.is(SemanticType.PREDICATE)) { + // // inherency must align with the resolution mode + // boolean hasDistributedInherency = + // scope.getService(Reasoner.class).hasDistributedInherency(candidate); + // ok = (hasDistributedInherency && instantiation) || + // (!hasDistributedInherency && !instantiation); + // } + if (ok) { + ret.add(candidate); + } } - return ret; + } } - - /** - * We only accept abstract parent concepts (up to the base observable) if the observable is a - * predicate. - * - * @param concept - * @param resolvedPredicates - * @return - */ - private List getAcceptableParents(Concept concept, Collection> resolvedPredicates) { - - List ret = new ArrayList<>(); - ret.add(concept); - if (concept.is(SemanticType.TRAIT) || concept.is(SemanticType.ROLE)) { - Concept base = scope.getService(Reasoner.class).baseParentTrait(concept); - if (base == null) { - return ret; - } - for (;;) { - concept = scope.getService(Reasoner.class).parent(concept); - ret.add(concept); - if (concept.isAbstract() || concept.equals(base)) { - break; - } - } - } - - if (resolvedPredicates != null && !resolvedPredicates.isEmpty()) { - List rabs = new ArrayList<>(); - for (Concept r : ret) { - rabs.add(replaceComponent(r, resolvedPredicates)); - } - ret.addAll(rabs); - } - + return ret; + } + + /** + * We only accept abstract parent concepts (up to the base observable) if the observable is a + * predicate. 
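The traversal described above only climbs the parent chain for traits and roles, stopping at the first abstract ancestor or at the base parent trait. A standalone sketch of that walk over a toy hierarchy (the Node record and the sample URNs are illustrative stand-ins for the reasoner calls):

```java
import java.util.ArrayList;
import java.util.List;

class ParentWalk {

  record Node(String urn, boolean isAbstract, Node parent) {}

  /** Collect the concept plus its ancestors, stopping at the first abstract parent or at the base trait. */
  static List<Node> acceptableParents(Node concept, Node base) {
    List<Node> ret = new ArrayList<>();
    ret.add(concept);
    Node current = concept;
    while (current.parent() != null) {
      current = current.parent();
      ret.add(current);
      if (current.isAbstract() || current.equals(base)) {
        break;
      }
    }
    return ret;
  }

  public static void main(String[] args) {
    Node base = new Node("demo:BaseTrait", true, null);
    Node mid = new Node("demo:MidTrait", false, base);
    Node leaf = new Node("demo:LeafTrait", false, mid);
    acceptableParents(leaf, base).forEach(node -> System.out.println(node.urn()));
  }
}
```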
+ * + * @param concept + * @param resolvedPredicates + * @return + */ + private List getAcceptableParents( + Concept concept, Collection> resolvedPredicates) { + + List ret = new ArrayList<>(); + ret.add(concept); + if (concept.is(SemanticType.TRAIT) || concept.is(SemanticType.ROLE)) { + Concept base = scope.getService(Reasoner.class).baseParentTrait(concept); + if (base == null) { return ret; - } - - public ObservableKbox(String name, Scope scope) { - - super(name); - - this.scope = scope; -// this.reasoner = scope.getService(Reasoner.class); - this.resourceService = scope.getService(ResourcesService.class); - -// if (this.reasoner == null || this.resourceService == null) { -// throw new KlabIllegalStateException("cannot initialize kbox without a valid reasoner or resource service"); -// } - - setSchema(Concept.class, new ObservableSchema()); - setSchema(KimNamespace.class, new NamespaceSchema()); - setSerializer(KimNamespace.class, new NamespaceSerializer()); - - try { - loadConcepts(); - } catch (KlabException e) { - throw new KlabStorageException(e); + } + for (; ; ) { + concept = scope.getService(Reasoner.class).parent(concept); + ret.add(concept); + if (concept.isAbstract() || concept.equals(base)) { + break; } + } } - private void loadConcepts() { - - if (!database.hasTable("concepts")) { - return; - } - - database.query("SELECT oid, definition FROM concepts", new SQL.SimpleResultHandler(){ - - @Override - public void onRow(ResultSet rs) { - try { - definitionHash.put(rs.getString(2), rs.getLong(1)); - typeHash.put(rs.getLong(1), rs.getString(2)); - } catch (SQLException e) { - throw new KlabStorageException(e); - } - } - }); + if (resolvedPredicates != null && !resolvedPredicates.isEmpty()) { + List rabs = new ArrayList<>(); + for (Concept r : ret) { + rabs.add(replaceComponent(r, resolvedPredicates)); + } + ret.addAll(rabs); } - /** - * De-nullify a string - use when filling in INSERT statements. - * - * @param o - * @return - */ - protected static String cn(Object o) { - return o == null ? 
"" : o.toString(); - } - - protected Map getMetadataFor(long oid) { - - class Handler extends SQL.SimpleResultHandler { + return ret; + } - Map ret = null; + public ObservableKbox(String name, Scope scope) { - @Override - public void onRow(ResultSet rs) { - try { - String key = rs.getString(2); - Object value = rs.getObject(3); + super(name); - if (key != null && value != null) { - if (ret == null) { - ret = new HashMap<>(); - } - ret.put(key, value.toString()); - } + this.scope = scope; + // this.reasoner = scope.getService(Reasoner.class); + this.resourceService = scope.getService(ResourcesService.class); - } catch (SQLException e) { - throw new KlabStorageException(e); - } - } - } + // if (this.reasoner == null || this.resourceService == null) { + // throw new KlabIllegalStateException("cannot initialize kbox without a valid + // reasoner or resource service"); + // } - Handler handler = new Handler(); - database.query("SELECT * FROM metadata WHERE fid = " + oid, handler); + setSchema(Concept.class, new ObservableSchema()); + setSchema(KimNamespace.class, new NamespaceSchema()); + setSerializer(KimNamespace.class, new NamespaceSerializer()); - return handler.ret; + try { + loadConcepts(); + } catch (KlabException e) { + throw new KlabStorageException(e); } + } - protected void deleteMetadataFor(long oid) throws KlabException { - database.execute("DELETE FROM metadata WHERE fid = " + oid); - } + private void loadConcepts() { - protected void storeMetadataFor(long oid, Map metadata) { + if (!database.hasTable("concepts")) { + return; + } - for (String s : metadata.keySet()) { + database.query( + "SELECT oid, definition FROM concepts", + new SQL.SimpleResultHandler() { - String sql = " INSERT INTO metadata VALUES (" + oid + ", "// + - // "fid - // LONG, - // " - + "'" + s + "', "// + "key VARCHAR(256), " - + "?"// + "value OTHER" - + ")"; + @Override + public void onRow(ResultSet rs) { try { - /* - * OK, must execute these right away unfortunately - so if something goes wrong with - * the object's storage these will remain in the DB. - */ - PreparedStatement prsql = database.getConnection().prepareStatement(sql); - prsql.setObject(1, metadata.get(s), Types.JAVA_OBJECT); - prsql.executeUpdate(); - } catch (Exception e) { - throw new KlabStorageException(e); + definitionHash.put(rs.getString(2), rs.getLong(1)); + typeHash.put(rs.getLong(1), rs.getString(2)); + } catch (SQLException e) { + throw new KlabStorageException(e); } - } - } + } + }); + } - /** - * Pass the a namespace to check if its objects need to be stored. If the stored namespace - * record does not exist or has a timestamp older than the passed one, remove all objects that - * belong to it and return true. Does not store a new namespace record - this should be done - * when this has returned true and there were no errors. - * - * Returns: 0 if no need to refresh, 1 if it must be entirely refreshed and every model and - * namespace record is removed from the kbox, and 2 if the models without errors need to be - * checked again (they may be in or not). - * - * - * @param namespace - * @param monitor - * @return result action code - */ - public int removeIfOlder(KimNamespace namespace, Channel monitor) { + /** + * De-nullify a string - use when filling in INSERT statements. + * + * @param o + * @return + */ + protected static String cn(Object o) { + return o == null ? 
"" : o.toString(); + } - if (!database.hasTable("namespaces")) { - return 1; - } + protected Map getMetadataFor(long oid) { - long dbTimestamp = getNamespaceTimestamp(namespace); - long timestamp = namespace.getLastUpdateTimestamp(); + class Handler extends SQL.SimpleResultHandler { - /* - * if we have stored something and we are younger than the stored ns, remove all models - * coming from it so we can add our new ones. - */ - if (timestamp > dbTimestamp) { + Map ret = null; - if (dbTimestamp > 0) { + @Override + public void onRow(ResultSet rs) { + try { + String key = rs.getString(2); + Object value = rs.getObject(3); - monitor.debug("Removing all observations in namespace " + namespace.getUrn()); - int removed = clearNamespace(namespace.getUrn(), monitor); - monitor.debug("Removed " + removed + " observations."); + if (key != null && value != null) { + if (ret == null) { + ret = new HashMap<>(); } + ret.put(key, value.toString()); + } - monitor.debug("Refreshing observations in " + namespace.getUrn() + ": stored " + new Date(dbTimestamp) + " < " - + new Date(timestamp)); - - return 1; + } catch (SQLException e) { + throw new KlabStorageException(e); } + } + } + Handler handler = new Handler(); + database.query("SELECT * FROM metadata WHERE fid = " + oid, handler); + + return handler.ret; + } + + protected void deleteMetadataFor(long oid) throws KlabException { + database.execute("DELETE FROM metadata WHERE fid = " + oid); + } + + protected void storeMetadataFor(long oid, Map metadata) { + + for (String s : metadata.keySet()) { + + String sql = + " INSERT INTO metadata VALUES (" + + oid + + ", " // + + // "fid + // LONG, + // " + + "'" + + s + + "', " // + "key VARCHAR(256), " + + "?" // + "value OTHER" + + ")"; + try { /* - * if we have not changed the source file but models had errors when stored, return the - * conservative mode so we can check model by model and only store those that are no longer - * in error due to external reasons. + * OK, must execute these right away unfortunately - so if something goes wrong with + * the object's storage these will remain in the DB. */ - if (namespace != null && Utils.Notifications.hasErrors(namespace.getNotifications())) { - return 2; - } - - return 0; + PreparedStatement prsql = database.getConnection().prepareStatement(sql); + prsql.setObject(1, metadata.get(s), Types.JAVA_OBJECT); + prsql.executeUpdate(); + } catch (Exception e) { + throw new KlabStorageException(e); + } + } + } + + /** + * Pass the a namespace to check if its objects need to be stored. If the stored namespace record + * does not exist or has a timestamp older than the passed one, remove all objects that belong to + * it and return true. Does not store a new namespace record - this should be done when this has + * returned true and there were no errors. + * + *

Returns: 0 if no need to refresh, 1 if it must be entirely refreshed and every model and + * namespace record is removed from the kbox, and 2 if the models without errors need to be + * checked again (they may be in or not). + * + * @param namespace + * @param monitor + * @return result action code + */ + public int removeIfOlder(KimNamespace namespace, Channel monitor) { + + if (!database.hasTable("namespaces")) { + return 1; } - public void remove(String namespaceId, Channel monitor) { + long dbTimestamp = getNamespaceTimestamp(namespace); + long timestamp = namespace.getLastUpdateTimestamp(); - if (!database.hasTable("namespaces")) { - return; - } - monitor.debug("Removing all observations in namespace " + namespaceId); - int removed = clearNamespace(namespaceId, monitor); + /* + * if we have stored something and we are younger than the stored ns, remove all models + * coming from it so we can add our new ones. + */ + if (timestamp > dbTimestamp) { + + if (dbTimestamp > 0) { + + monitor.debug("Removing all observations in namespace " + namespace.getUrn()); + int removed = clearNamespace(namespace.getUrn(), monitor); monitor.debug("Removed " + removed + " observations."); - } + } - /** - * Return 0 if namespace is not in the kbox, or the (long) timestamp of the namespace if it is. - * - * @param namespace - * - * @return result code - */ - public long getNamespaceTimestamp(KimNamespace namespace) throws KlabException { + monitor.debug( + "Refreshing observations in " + + namespace.getUrn() + + ": stored " + + new Date(dbTimestamp) + + " < " + + new Date(timestamp)); - if (!database.hasTable("namespaces")) { - return 0L; - } - List ret = database - .queryIds("SELECT timestamp FROM namespaces WHERE id = '" + Utils.Escape.forSQL(namespace.getUrn()) + "';"); - return ret.size() > 0 ? ret.get(0) : 0L; + return 1; } - protected static String nullify(String string) { - if (string == null || string.isEmpty()) { - return null; - } - return string; + /* + * if we have not changed the source file but models had errors when stored, return the + * conservative mode so we can check model by model and only store those that are no longer + * in error due to external reasons. + */ + if (namespace != null && Utils.Notifications.hasErrors(namespace.getNotifications())) { + return 2; } - /** - * TODO use a proper builder - * - * @param original - * @param replacements - * @return - */ - protected Concept replaceComponent(Concept original, Collection> replacements) { + return 0; + } - if (replacements.isEmpty()) { - return original; - } + public void remove(String namespaceId, Channel monitor) { - String declaration = original.getUrn(); - for (Pair key : replacements) { - String rep = key.getSecond().getUrn(); - if (rep.contains(" ")) { - rep = "(" + rep + ")"; - } - declaration = declaration.replace(key.getFirst().getUrn(), rep); - } + if (!database.hasTable("namespaces")) { + return; + } + monitor.debug("Removing all observations in namespace " + namespaceId); + int removed = clearNamespace(namespaceId, monitor); + monitor.debug("Removed " + removed + " observations."); + } + + /** + * Return 0 if namespace is not in the kbox, or the (long) timestamp of the namespace if it is. 
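The 0/1/2 contract described above is meant to be dispatched on by the code that (re)stores models. A hedged caller-side sketch; the kbox, namespace and monitor variables and the two helper methods are hypothetical placeholders, not part of the original source:

```java
// Hypothetical caller of removeIfOlder; the helpers stand in for the real storage logic.
int action = kbox.removeIfOlder(namespace, monitor);
switch (action) {
  case 0 -> { /* stored copy is current: nothing to do */ }
  case 1 -> storeAllModels(namespace);          // kbox was cleared for this namespace: store everything again
  case 2 -> recheckModelsWithErrors(namespace); // source unchanged, but retry the models that had errors
  default -> throw new IllegalStateException("unexpected action code " + action);
}
```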
+ * + * @param namespace + * @return result code + */ + public long getNamespaceTimestamp(KimNamespace namespace) throws KlabException { + + if (!database.hasTable("namespaces")) { + return 0L; + } + List ret = + database.queryIds( + "SELECT timestamp FROM namespaces WHERE id = '" + + Utils.Escape.forSQL(namespace.getUrn()) + + "';"); + return ret.size() > 0 ? ret.get(0) : 0L; + } + + protected static String nullify(String string) { + if (string == null || string.isEmpty()) { + return null; + } + return string; + } + + /** + * TODO use a proper builder + * + * @param original + * @param replacements + * @return + */ + protected Concept replaceComponent( + Concept original, Collection> replacements) { + + if (replacements.isEmpty()) { + return original; + } - return scope.getService(Reasoner.class).declareConcept(resourceService.resolveConcept(declaration)); + String declaration = original.getUrn(); + for (Pair key : replacements) { + String rep = key.getSecond().getUrn(); + if (rep.contains(" ")) { + rep = "(" + rep + ")"; + } + declaration = declaration.replace(key.getFirst().getUrn(), rep); } + return scope + .getService(Reasoner.class) + .declareConcept(resourceService.resolveConcept(declaration)); + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ResourcesKBox.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ResourcesKBox.java index 633ce784d..ee85ccc7d 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ResourcesKBox.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/persistence/ResourcesKBox.java @@ -16,68 +16,71 @@ import java.io.File; /** - * Nitrite-based noSQL embedded storage for observables, resources, models and permissions. The URN is always - * the primary key. Disk-based with automatic backup. Can navigate semantics and spatial/temporal queries. + * Nitrite-based noSQL embedded storage for observables, resources, models and permissions. The URN + * is always the primary key. Disk-based with automatic backup. Can navigate semantics and + * spatial/temporal queries. 
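The class javadoc above notes that the URN is always the primary key, and resource lookups additionally resolve an "any" version request to the latest stored revision (see getResource below). A plain-Java, in-memory stand-in for that keying scheme, not the Nitrite-backed implementation; version strings are compared by natural order here only for brevity, whereas the real store would use the Version type's own ordering:

```java
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;

class UrnVersionIndex<T> {

  // urn -> (version -> payload); versions kept sorted so the last entry is the "latest"
  private final Map<String, NavigableMap<String, T>> store = new ConcurrentHashMap<>();

  /** Returns true when the urn was already present, mirroring the update-vs-new flag of putResource. */
  boolean put(String urn, String version, T payload) {
    var versions = store.computeIfAbsent(urn, k -> new TreeMap<>());
    boolean existed = !versions.isEmpty();
    versions.put(version, payload);
    return existed;
  }

  /** Pass null to mean "any version" and obtain the latest revision, or an exact version string. */
  T get(String urn, String version) {
    var versions = store.get(urn);
    if (versions == null || versions.isEmpty()) {
      return null;
    }
    return version == null ? versions.lastEntry().getValue() : versions.get(version);
  }
}
```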
*/ public class ResourcesKBox { - private final Nitrite db; - private final File databaseFile; - private final ResourcesProvider resourcesProvider; + private final Nitrite db; + private final File databaseFile; + private final ResourcesProvider resourcesProvider; - public ResourcesKBox(Scope scope, ServiceStartupOptions options, ResourcesProvider service) { + public ResourcesKBox(Scope scope, ServiceStartupOptions options, ResourcesProvider service) { - this.resourcesProvider = service; - this.databaseFile = BaseService.getFileInConfigurationSubdirectory(options, "data", "resources.db"); - RocksDBModule storeModule = RocksDBModule.withConfig().filePath(databaseFile.getPath()).build(); - this.db = Nitrite.builder() - .loadModule(storeModule) - .loadModule(new SpatialModule()) - .loadModule(new JacksonMapperModule(new GeometryModule())) - .openOrCreate(); - } + this.resourcesProvider = service; + this.databaseFile = + BaseService.getFileInConfigurationSubdirectory(options, "data", "resources.db"); + RocksDBModule storeModule = RocksDBModule.withConfig().filePath(databaseFile.getPath()).build(); + this.db = + Nitrite.builder() + .loadModule(storeModule) + .loadModule(new SpatialModule()) + .loadModule(new JacksonMapperModule(new GeometryModule())) + .openOrCreate(); + } - public void shutdown() { - if (this.db != null && !this.db.isClosed()) { - this.db.close(); - } + public void shutdown() { + if (this.db != null && !this.db.isClosed()) { + this.db.close(); } + } - /** - * Find the resource with the passed URN and version and return it. - * - * @param urn can have a @version segment, in which case the version parameter can be - * null or empty. - * @param version Use {@link Version#ANY_VERSION} to obtain the latest resource revision. - * @return the resource or null - */ - public Resource getResource(String urn, Version version) { - return null; - } + /** + * Find the resource with the passed URN and version and return it. + * + * @param urn can have a @version segment, in which case the version parameter can be + * null or empty. + * @param version Use {@link Version#ANY_VERSION} to obtain the latest resource revision. + * @return the resource or null + */ + public Resource getResource(String urn, Version version) { + return null; + } - /** - * Store the passed resource with its version. Return true if this was an update of a previously stored - * resource or this is new. - * - * @param resource - * @return - */ - public boolean putResource(Resource resource) { - return false; - } + /** + * Store the passed resource with its version. Return true if this was an update of a previously + * stored resource or this is new. + * + * @param resource + * @return + */ + public boolean putResource(Resource resource) { + return false; + } - /** - * Return the status for the passed URN and version. - * - * @param urn same as in {@link #getResource(String, Version)} - * @param version same as in {@link #getResource(String, Version)} - * @return status or null - */ - public ResourceStatus getStatus(String urn, Version version) { - return null; - } + /** + * Return the status for the passed URN and version. 
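The getResource contract above allows the version to ride along in the URN as a "@version" segment. A small sketch of splitting such a reference before the lookup; the separator handling is an assumption based on that javadoc, and the sample URN is made up:

```java
class UrnRef {

  record Parsed(String urn, String version) {}

  /** Split "my.project:dem@1.0.0" into the bare urn and the optional version segment. */
  static Parsed parse(String reference) {
    int at = reference.lastIndexOf('@');
    return at < 0
        ? new Parsed(reference, null)
        : new Parsed(reference.substring(0, at), reference.substring(at + 1));
  }

  public static void main(String[] args) {
    System.out.println(parse("my.project:dem@1.0.0")); // Parsed[urn=my.project:dem, version=1.0.0]
    System.out.println(parse("my.project:dem"));       // Parsed[urn=my.project:dem, version=null]
  }
}
```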
+ * + * @param urn same as in {@link #getResource(String, Version)} + * @param version same as in {@link #getResource(String, Version)} + * @return status or null + */ + public ResourceStatus getStatus(String urn, Version version) { + return null; + } - public boolean putStatus(String urn, Version version, ResourceStatus status) { - return false; - } -} \ No newline at end of file + public boolean putStatus(String urn, Version version, ResourceStatus status) { + return false; + } +} diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/ResourceContextualizer.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/ResourceContextualizer.java index 139612523..190493217 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/ResourceContextualizer.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/ResourceContextualizer.java @@ -6,48 +6,49 @@ import org.integratedmodelling.klab.common.data.Instance; /** - * One of these is created per resource contextualization request. Drives the functions in the adapter to - * create the contextualized resource payload, which is an Instance object from the Avro schema. + * One of these is created per resource contextualization request. Drives the functions in the + * adapter to create the contextualized resource payload, which is an Instance object from the Avro + * schema. */ public class ResourceContextualizer { - private final Adapter adapter; - private final Resource resource; - private final Geometry geometry; - - /** - * Pass a previously contextualized resource - * @param adapter - * @param resource - * @param geometry - */ - public ResourceContextualizer(Adapter adapter, Resource resource, Geometry geometry) { - this.adapter = adapter; - this.resource = resource; - this.geometry = geometry; - } - - /** - * Contextualize the resource to the specified geometry if the adapter provides this function. - * - * @return - */ - public Resource getContextualizedResource() { - // TODO use the adapter if it does contextualize resources - return this.resource; - } - - /** - * Produce the contextualized data from the resource in the passed geometry. Any errors will end up - * in the Instance notifications. - * - * @return - */ - public Instance getData() { - - var builder = Instance.newBuilder(); - - return builder.build(); - } - + private final Adapter adapter; + private final Resource resource; + private final Geometry geometry; + + /** + * Pass a previously contextualized resource + * + * @param adapter + * @param resource + * @param geometry + */ + public ResourceContextualizer(Adapter adapter, Resource resource, Geometry geometry) { + this.adapter = adapter; + this.resource = resource; + this.geometry = geometry; + } + + /** + * Contextualize the resource to the specified geometry if the adapter provides this function. + * + * @return + */ + public Resource getContextualizedResource() { + // TODO use the adapter if it does contextualize resources + return this.resource; + } + + /** + * Produce the contextualized data from the resource in the passed geometry. Any errors will end + * up in the Instance notifications. 
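Because errors travel inside the payload rather than as exceptions, callers are expected to inspect the Instance after the fact. A hedged caller-side sketch: the adapter, resource and geometry variables come from the contextualization request, and the getNotifications() accessor on the Avro Instance is an assumption inferred from the javadoc above.

```java
// Hypothetical caller of the contextualizer.
var contextualizer = new ResourceContextualizer(adapter, resource, geometry);
var contextualized = contextualizer.getContextualizedResource(); // adapter may rewrite the resource
var data = contextualizer.getData();                             // Avro Instance, never null
if (data.getNotifications() != null && !data.getNotifications().isEmpty()) { // accessor assumed
  // inspect for error-level entries and report back instead of shipping the payload
}
```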
+ * + * @return + */ + public Instance getData() { + + var builder = Instance.newBuilder(); + + return builder.build(); + } } diff --git a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/WorkspaceManager.java b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/WorkspaceManager.java index 878c7458f..96262c1f1 100644 --- a/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/WorkspaceManager.java +++ b/klab.services.resources/src/main/java/org/integratedmodelling/klab/services/resources/storage/WorkspaceManager.java @@ -1,8 +1,19 @@ package org.integratedmodelling.klab.services.resources.storage; +import static java.util.stream.Collectors.toList; + import com.google.common.collect.Sets; import com.google.inject.Inject; import com.google.inject.Injector; +import java.io.*; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.xtext.parser.IParseResult; @@ -39,8 +50,8 @@ import org.integratedmodelling.klab.resources.FileProjectStorage; import org.integratedmodelling.klab.services.ServiceStartupOptions; import org.integratedmodelling.klab.services.base.BaseService; -import org.integratedmodelling.klab.services.resources.ResourcesProvider; import org.integratedmodelling.klab.services.configuration.ResourcesConfiguration; +import org.integratedmodelling.klab.services.resources.ResourcesProvider; import org.integratedmodelling.klab.services.resources.lang.LanguageAdapter; import org.integratedmodelling.klab.services.resources.lang.WorldviewValidationScope; import org.integratedmodelling.klab.utilities.Utils; @@ -60,2233 +71,2515 @@ import org.jgrapht.graph.DefaultEdge; import org.jgrapht.traverse.TopologicalOrderIterator; -import java.io.*; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; - -import static java.util.stream.Collectors.toList; - /** - * Singleton that separates out all the logics in managing workspaces up to and not including the loading of - * the actual knowledge into k.LAB beans. + * Singleton that separates out all the logics in managing workspaces up to and not including the + * loading of the actual knowledge into k.LAB beans. 
*/ public class WorkspaceManager { - private final ServiceStartupOptions startupOptions; - private final ResourcesProvider service; - /** - * Default interval to check for changes in Git (15 minutes in milliseconds) - */ - private int DEFAULT_GIT_SYNC_INTERVAL_MINUTES = 15; - - - // project locks are mappings usertoken->projectName and enable remote updating of projects for one - // user at - // a time, while inhibiting file change logging in project storage - private Map projectLocks = Collections.synchronizedMap(new HashMap<>()); - private AtomicBoolean loading = new AtomicBoolean(false); - private List> _projectLoadOrder; - private List _ontologyOrder; - private Map _ontologyMap; - private List _namespaceOrder; - private Map _namespaceMap; - private List _behaviorOrder; - private Map _behaviorMap; - private List _worldviewOntologies; - private List _observationStrategies; - private List _observationStrategyDocuments; - private Map _observationStrategyDocumentMap; - // all docs that have been loaded through a URL remember the URL keyed by the document URN. No - // guarantee that all URLs correspond to a document in the current catalogs. - private Map documentURLs = new HashMap<>(); - private WorldviewImpl _worldview; - // - private AtomicBoolean consistent = new AtomicBoolean(true); - - // filled in at boot and maintained when changes happen - private WorldviewValidationScope languageValidationScope; - - private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); - private boolean worldviewProvider = false; - private String adoptedWorldview; + private final ServiceStartupOptions startupOptions; + private final ResourcesProvider service; + + /** Default interval to check for changes in Git (15 minutes in milliseconds) */ + private int DEFAULT_GIT_SYNC_INTERVAL_MINUTES = 15; + + // project locks are mappings usertoken->projectName and enable remote updating of projects for + // one + // user at + // a time, while inhibiting file change logging in project storage + private Map projectLocks = Collections.synchronizedMap(new HashMap<>()); + private AtomicBoolean loading = new AtomicBoolean(false); + private List> _projectLoadOrder; + private List _ontologyOrder; + private Map _ontologyMap; + private List _namespaceOrder; + private Map _namespaceMap; + private List _behaviorOrder; + private Map _behaviorMap; + private List _worldviewOntologies; + private List _observationStrategies; + private List _observationStrategyDocuments; + private Map _observationStrategyDocumentMap; + // all docs that have been loaded through a URL remember the URL keyed by the document URN. No + // guarantee that all URLs correspond to a document in the current catalogs. 
+ private Map documentURLs = new HashMap<>(); + private WorldviewImpl _worldview; + // + private AtomicBoolean consistent = new AtomicBoolean(true); + + // filled in at boot and maintained when changes happen + private WorldviewValidationScope languageValidationScope; + + private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); + private boolean worldviewProvider = false; + private String adoptedWorldview; + + /** + * This includes the non-local projects, in load order + * + * @return + */ + public List getProjects() { + var ret = new ArrayList(); + for (var project : projects.values()) { + ret.add(updateStatus(project)); + } + return ret; + } + + public Project getProject(String projectName) { + return projects.get(projectName); + } + + private T updateStatus(T container) { + + if (container instanceof Workspace workspace) { + for (var project : workspace.getProjects()) { + updateProjectStatus(project.getUrn(), null); + } + } else if (container instanceof Project) { + updateProjectStatus(container.getUrn(), null); + } else if (container instanceof KlabDocument document) { + updateProjectStatus(document.getProjectName(), document); + } + + return container; + } + + private void updateProjectStatus(String projectId, KlabDocument resource) { + var pd = projectDescriptors.get(projectId); + var prj = projects.get(projectId); + if (pd.storage instanceof FileProjectStorage fps && prj instanceof ProjectImpl pimpl) { + pimpl.setRepositoryState(fps.getRepositoryState()); + // fps.updateMetadata(prj, resource, scope); + } + } + + public KimConcept.Descriptor describeConcept(String conceptUrn) { + try { + String[] split = conceptUrn.split(":"); + var ontology = getOntology(split[0]); + if (ontology != null) { + // we don't cache the concept map, so this is a potentially expensive operation and its + // results should be cached. + var declaration = conceptMap(ontology).get(split[1]); + if (declaration != null) { + var type = EnumSet.copyOf(declaration.getType()); + type.retainAll(SemanticType.DECLARABLE_TYPES); + return new KimConcept.Descriptor( + declaration.getNamespace(), + declaration.getUrn(), + type.size() == 1 ? type.iterator().next() : SemanticType.NOTHING, + declaration.getMetadata().get(Metadata.DC_COMMENT, "No description provided"), + declaration + .getMetadata() + .get(Metadata.DC_LABEL, ontology.getUrn() + ":" + declaration.getUrn()), + declaration.isAbstract()); + } + } + } catch (Throwable throwable) { + // just return null + scope.error(throwable); + } + return null; + } + + private Map conceptMap(KimOntology ontology) { + Map ret = new HashMap<>(); + for (var conceptStatement : ontology.getStatements()) { + collectConcepts(conceptStatement, ret); + } + return ret; + } + + private void collectConcepts( + KimConceptStatement conceptStatement, Map ret) { + ret.put(conceptStatement.getUrn(), conceptStatement); + for (var child : conceptStatement.getChildren()) { + collectConcepts(child, ret); + } + } + + /** + * Execute the passed operation as an atomic unit, handling any issue. All workspace-modifying + * operations called after initialization should be wrapped in this. + * + * @param runnable + */ + private synchronized void atomicOperation(Runnable runnable) { + try { + runnable.run(); + } catch (Throwable throwable) { + scope.error(throwable, Klab.ErrorContext.RESOURCES_SERVICE, Klab.ErrorCode.INTERNAL_ERROR); + } + } + + public Collection getNamespaceUrns() { + return _namespaceMap == null ? 
Collections.emptySet() : _namespaceMap.keySet(); + } + + public Collection getBehaviorUrns() { + return _behaviorMap == null ? Collections.emptySet() : _behaviorMap.keySet(); + } + + public URL lockProject(String urn, String token, boolean isLocal) { + + var descriptor = projectDescriptors.get(urn); + if (descriptor == null || !(descriptor.storage instanceof FileProjectStorage)) { + return null; + } + + // check and record lock + if (projectLocks.containsKey(urn) && !projectLocks.get(urn).equals(token)) { + scope.info("Lock attempt failed: project " + urn + " is already locked"); + return null; + } + + projectLocks.put(urn, token); + ((FileProjectStorage) descriptor.storage).lock(true); + scope.info("Project " + urn + " is locked"); + + if (isLocal) { + return descriptor.storage.getUrl(); + } else { + // TODO prepare a zip file and make it available through download area, return public URL + } + + return null; + } + + public boolean unlockProject(String urn, String token) { + if (projectLocks.containsKey(urn)) { + + if (projectLocks.get(urn).equals(token)) { + var descriptor = projectDescriptors.get(urn); + ((FileProjectStorage) descriptor.storage).lock(false); + projectLocks.remove(urn); + scope.info("Project " + urn + " unlocked"); - /** - * This includes the non-local projects, in load order - * - * @return - */ - public List getProjects() { - var ret = new ArrayList(); - for (var project : projects.values()) { - ret.add(updateStatus(project)); + return true; + } + } + return false; + } + + public boolean isWorldviewProvider() { + return this.worldviewProvider; + } + + public String getAdoptedWorldview() { + return this.adoptedWorldview; + } + + public List manageRepository( + String projectName, RepositoryState.Operation operation, String[] arguments) { + + List ret = new ArrayList<>(); + List notifications = new ArrayList<>(); + + var pd = projectDescriptors.get(projectName); + + if (pd != null && pd.storage instanceof FileProjectStorage fileProjectStorage) { + + var mods = + switch (operation) { + case FETCH_COMMIT_AND_PUSH -> + Utils.Git.fetchCommitAndPush( + fileProjectStorage.getRootFolder(), + arguments == null || arguments.length == 0 + ? 
"Committed by k.LAB resources " + "service" + : arguments[0], + scope); + case FETCH_AND_MERGE -> + Utils.Git.fetchAndMerge(fileProjectStorage.getRootFolder(), scope); + case COMMIT_AND_SWITCH -> + Utils.Git.commitAndSwitch(fileProjectStorage.getRootFolder(), arguments[0]); + case HARD_RESET -> Utils.Git.hardReset(fileProjectStorage.getRootFolder()); + case MERGE_CHANGES_FROM -> + Utils.Git.mergeChangesFrom(fileProjectStorage.getRootFolder(), arguments[0]); + }; + + List> changes = new ArrayList<>(); + if (mods != null) { + + notifications.addAll(mods.getNotifications()); + + if (!Utils.Notifications.hasErrors(mods.getNotifications())) { + + for (var path : mods.getRemovedPaths()) { + var ddata = ProjectStorage.getDocumentData(path, "/"); + if (ddata != null) { + changes.add( + Triple.of( + ddata.getFirst(), + CRUDOperation.DELETE, + fileProjectStorage.getDocumentUrl(path, "/"))); + } + } + for (var path : mods.getAddedPaths()) { + var ddata = ProjectStorage.getDocumentData(path, "/"); + if (ddata != null) { + changes.add( + Triple.of( + ddata.getFirst(), + CRUDOperation.CREATE, + fileProjectStorage.getDocumentUrl(path, "/"))); + } + } + for (var path : mods.getModifiedPaths()) { + var ddata = ProjectStorage.getDocumentData(path, "/"); + if (ddata != null) { + changes.add( + Triple.of( + ddata.getFirst(), + CRUDOperation.UPDATE, + fileProjectStorage.getDocumentUrl(path, "/"))); + } + } } - return ret; - } - - public Project getProject(String projectName) { - return projects.get(projectName); - } + } - private T updateStatus(T container) { + var repositoryState = fileProjectStorage.getRepositoryState(); - if (container instanceof Workspace workspace) { - for (var project : workspace.getProjects()) { - updateProjectStatus(project.getUrn(), null); - } - } else if (container instanceof Project) { - updateProjectStatus(container.getUrn(), null); - } else if (container instanceof KlabDocument document) { - updateProjectStatus(document.getProjectName(), document); + if (!changes.isEmpty()) { + for (var result : handleFileChange(projectName, changes)) { + result.getNotifications().addAll(notifications); + ret.add(result); + } + } else { + if (notifications.isEmpty()) { + notifications.add(Notification.info("No repository changes", UI.Interactivity.DISPLAY)); } + var result = ResourceSet.empty(); + result.getNotifications().addAll(notifications); + ret.add(result); + } - return container; - } + for (var rset : ret) { + var projectResource = new ResourceSet.Resource(); + projectResource.setResourceVersion(pd.manifest.getVersion()); + projectResource.setProjectUrn(pd.name); + projectResource.setResourceUrn(pd.name); + projectResource.setRepositoryState(repositoryState); + projectResource.setKnowledgeClass(KlabAsset.KnowledgeClass.PROJECT); + rset.getProjects().add(projectResource); + } - private void updateProjectStatus(String projectId, KlabDocument resource) { - var pd = projectDescriptors.get(projectId); - var prj = projects.get(projectId); - if (pd.storage instanceof FileProjectStorage fps && prj instanceof ProjectImpl pimpl) { - pimpl.setRepositoryState(fps.getRepositoryState()); - // fps.updateMetadata(prj, resource, scope); - } + return ret; } - public KimConcept.Descriptor describeConcept(String conceptUrn) { - try { - String[] split = conceptUrn.split(":"); - var ontology = getOntology(split[0]); - if (ontology != null) { - // we don't cache the concept map, so this is a potentially expensive operation and its - // results should be cached. 
- var declaration = conceptMap(ontology).get(split[1]); - if (declaration != null) { - var type = EnumSet.copyOf(declaration.getType()); - type.retainAll(SemanticType.DECLARABLE_TYPES); - return new KimConcept.Descriptor(declaration.getNamespace(), declaration.getUrn(), - type.size() == 1 ? type.iterator().next() : SemanticType.NOTHING, - declaration.getMetadata().get(Metadata.DC_COMMENT, "No description provided"), - declaration.getMetadata().get(Metadata.DC_LABEL, - ontology.getUrn() + ":" + declaration.getUrn()), - declaration.isAbstract()); - } - } - } catch (Throwable throwable) { - // just return null - scope.error(throwable); - } - return null; - } + return List.of( + ResourceSet.empty( + Notification.create( + "Project" + projectName + " not found or not " + "accessible", + Notification.Level.Error))); + } - private Map conceptMap(KimOntology ontology) { - Map ret = new HashMap<>(); - for (var conceptStatement : ontology.getStatements()) { - collectConcepts(conceptStatement, ret); - } - return ret; - } + class StrategyParser extends Parser { - private void collectConcepts(KimConceptStatement conceptStatement, Map ret) { - ret.put(conceptStatement.getUrn(), conceptStatement); - for (var child : conceptStatement.getChildren()) { - collectConcepts(child, ret); - } + @Override + protected Injector createInjector() { + return new ObservationStandaloneSetup().createInjectorAndDoEMFRegistration(); } /** - * Execute the passed operation as an atomic unit, handling any issue. All workspace-modifying operations - * called after initialization should be wrapped in this. + * Parse a concept definition into its syntactic peer, which should be inspected for errors + * before turning into semantics. * - * @param runnable + * @param strategyUrl + * @return the parsed semantic expression, or null if the parser cannot make sense of it. */ - private synchronized void atomicOperation(Runnable runnable) { - try { - runnable.run(); - } catch (Throwable throwable) { - scope.error(throwable, Klab.ErrorContext.RESOURCES_SERVICE, Klab.ErrorCode.INTERNAL_ERROR); - } - } - + public ObservationStrategiesSyntax parseStrategies(URL strategyUrl, String projectName) { - public Collection getNamespaceUrns() { - return _namespaceMap == null ? Collections.emptySet() : _namespaceMap.keySet(); - } - - public Collection getBehaviorUrns() { - return _behaviorMap == null ? 
Collections.emptySet() : _behaviorMap.keySet(); - } + List errors = new ArrayList<>(); - public URL lockProject(String urn, String token, boolean isLocal) { + try (var input = strategyUrl.openStream()) { + var result = parse(input, errors); - var descriptor = projectDescriptors.get(urn); - if (descriptor == null || !(descriptor.storage instanceof FileProjectStorage)) { - return null; + if (!errors.isEmpty()) { + for (var error : errors) { + scope.error( + "Observation strategy resource has errors: " + strategyUrl, + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.OBSERVATION_STRATEGY); + } + return null; } - // check and record lock - if (projectLocks.containsKey(urn) && !projectLocks.get(urn).equals(token)) { - scope.info("Lock attempt failed: project " + urn + " is already locked"); - return null; - } + if (result instanceof Strategies strategies) { + return new ObservationStrategiesSyntaxImpl(strategies, languageValidationScope) { - projectLocks.put(urn, token); - ((FileProjectStorage) descriptor.storage).lock(true); - scope.info("Project " + urn + " is locked"); + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.WARNING))); + } - if (isLocal) { - return descriptor.storage.getUrl(); - } else { - // TODO prepare a zip file and make it available through download area, return public URL + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.ERROR))); + } + }; } - - return null; + } catch (IOException e) { + scope.error( + "Error loading observation strategy " + strategyUrl, + Klab.ErrorCode.READ_FAILED, + Klab.ErrorContext.OBSERVATION_STRATEGY); + } + return null; } + } - public boolean unlockProject(String urn, String token) { - if (projectLocks.containsKey(urn)) { + class ObservableParser extends Parser { - if (projectLocks.get(urn).equals(token)) { - var descriptor = projectDescriptors.get(urn); - ((FileProjectStorage) descriptor.storage).lock(false); - projectLocks.remove(urn); - scope.info("Project " + urn + " unlocked"); + @Inject ObservableGrammarAccess grammarAccess; - return true; - } - } - return false; + @Override + protected Injector createInjector() { + return new ObservableStandaloneSetup().createInjectorAndDoEMFRegistration(); } - public boolean isWorldviewProvider() { - return this.worldviewProvider; - } + /** + * Parse a concept definition into its syntactic peer, which should be inspected for errors + * before turning into semantics. + * + * @param conceptDefinition + * @return the parsed semantic expression, or null if the parser cannot make sense of it. 
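parseConcept returns null when the text is not even syntactically a concept expression; otherwise validation messages are collected through the logWarning/logError overrides into the syntax object's notifications. A hedged caller sketch (the concept string is made up, and getNotifications() is the same accessor used by those overrides):

```java
// Hypothetical caller inside the workspace manager.
var syntax = observableParser.parseConcept("demo:SomeConcept");
if (syntax == null) {
  // not parseable as a concept expression at all
} else if (!syntax.getNotifications().isEmpty()) {
  // parsed, but the validation scope reported problems: inspect them before building semantics
} else {
  // safe to hand over to the reasoner
}
```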
+ */ + public SemanticSyntax parseConcept(String conceptDefinition) { + var result = + parser.parse( + grammarAccess.getConceptExpressionRule(), new StringReader(conceptDefinition)); + var ret = result.getRootASTElement(); + if (ret instanceof ConceptExpression) { + return new SemanticSyntaxImpl( + (ConceptExpression) ret, false, null, languageValidationScope) { + + List errors = new ArrayList<>(); + + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.WARNING))); + } - public String getAdoptedWorldview() { - return this.adoptedWorldview; + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.ERROR))); + } + }; + } + return null; } - public List manageRepository(String projectName, RepositoryState.Operation operation, - String[] arguments) { - - List ret = new ArrayList<>(); - List notifications = new ArrayList<>(); - - var pd = projectDescriptors.get(projectName); - - if (pd != null && pd.storage instanceof FileProjectStorage fileProjectStorage) { - - var mods = switch (operation) { - case FETCH_COMMIT_AND_PUSH -> Utils.Git.fetchCommitAndPush(fileProjectStorage.getRootFolder(), - arguments == null || arguments.length == 0 ? "Committed by k.LAB resources " + - "service" : arguments[0], scope); - case FETCH_AND_MERGE -> Utils.Git.fetchAndMerge(fileProjectStorage.getRootFolder(), scope); - case COMMIT_AND_SWITCH -> - Utils.Git.commitAndSwitch(fileProjectStorage.getRootFolder(), arguments[0]); - case HARD_RESET -> Utils.Git.hardReset(fileProjectStorage.getRootFolder()); - case MERGE_CHANGES_FROM -> - Utils.Git.mergeChangesFrom(fileProjectStorage.getRootFolder(), arguments[0]); - }; - - List> changes = new ArrayList<>(); - if (mods != null) { - - notifications.addAll(mods.getNotifications()); - - if (!Utils.Notifications.hasErrors(mods.getNotifications())) { - - for (var path : mods.getRemovedPaths()) { - var ddata = ProjectStorage.getDocumentData(path, "/"); - if (ddata != null) { - changes.add(Triple.of(ddata.getFirst(), CRUDOperation.DELETE, - fileProjectStorage.getDocumentUrl(path, "/"))); - } - } - for (var path : mods.getAddedPaths()) { - var ddata = ProjectStorage.getDocumentData(path, "/"); - if (ddata != null) { - changes.add(Triple.of(ddata.getFirst(), CRUDOperation.CREATE, - fileProjectStorage.getDocumentUrl(path, "/"))); - } - } - for (var path : mods.getModifiedPaths()) { - var ddata = ProjectStorage.getDocumentData(path, "/"); - if (ddata != null) { - changes.add(Triple.of(ddata.getFirst(), CRUDOperation.UPDATE, - fileProjectStorage.getDocumentUrl(path, "/"))); - } - } - } - } - - var repositoryState = fileProjectStorage.getRepositoryState(); - - if (!changes.isEmpty()) { - for (var result : handleFileChange(projectName, changes)) { - result.getNotifications().addAll(notifications); - ret.add(result); - } - } else { - if (notifications.isEmpty()) { - notifications.add(Notification.info("No repository changes", UI.Interactivity.DISPLAY)); - } - var result = ResourceSet.empty(); - result.getNotifications().addAll(notifications); - ret.add(result); - } - - for (var rset : ret) { - var projectResource = new ResourceSet.Resource(); - 
projectResource.setResourceVersion(pd.manifest.getVersion()); - projectResource.setProjectUrn(pd.name); - projectResource.setResourceUrn(pd.name); - projectResource.setRepositoryState(repositoryState); - projectResource.setKnowledgeClass(KlabAsset.KnowledgeClass.PROJECT); - rset.getProjects().add(projectResource); - } - - return ret; - - } - - return List.of(ResourceSet.empty(Notification.create("Project" + projectName + " not found or not " + "accessible", Notification.Level.Error))); + /** + * Parse an observable definition into its syntactic peer, which should be inspected for errors + * before turning into semantics. + * + * @param observableDefinition + * @return the parsed semantic expression, or null if the parser cannot make sense of it. + */ + public ObservableSyntax parseObservable(String observableDefinition) { + var result = + parser.parse( + grammarAccess.getObservableSemanticsRule(), new StringReader(observableDefinition)); + var ret = result.getRootASTElement(); + if (ret instanceof ObservableSemantics) { + return new ObservableSyntaxImpl((ObservableSemantics) ret, languageValidationScope) { + + List errors = new ArrayList<>(); + + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.WARNING))); + } + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + getNotifications() + .add( + new Notification( + object, + new LanguageValidationScope.ValidationMessage( + message, -1, LanguageValidationScope.Level.ERROR))); + } + }; + } + return null; } + } - class StrategyParser extends Parser { + private ObservableParser observableParser = new ObservableParser(); + private StrategyParser strategyParser = new StrategyParser(); + private Parser ontologyParser = + new Parser() { @Override protected Injector createInjector() { - return new ObservationStandaloneSetup().createInjectorAndDoEMFRegistration(); - } - - /** - * Parse a concept definition into its syntactic peer, which should be inspected for errors before - * turning into semantics. - * - * @param strategyUrl - * @return the parsed semantic expression, or null if the parser cannot make sense of it. 
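Each Git operation handled by manageRepository above is reported back as one or more ResourceSets that carry the resulting document changes and any notifications, with the project's repository state attached. A hedged caller sketch of driving a commit-and-push through this entry point (project name and commit message are placeholders):

```java
// Hypothetical caller, e.g. a service endpoint reacting to a user commit request.
var results = workspaceManager.manageRepository(
    "my.project",                                    // placeholder project name
    RepositoryState.Operation.FETCH_COMMIT_AND_PUSH,
    new String[] {"Update observation models"});     // first argument doubles as the commit message
for (var resourceSet : results) {
  if (Utils.Notifications.hasErrors(resourceSet.getNotifications())) {
    // surface the git or validation problems to the client instead of applying the changes
  }
}
```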
- */ - public ObservationStrategiesSyntax parseStrategies(URL strategyUrl, String projectName) { - - List errors = new ArrayList<>(); - - try (var input = strategyUrl.openStream()) { - var result = parse(input, errors); - - if (!errors.isEmpty()) { - for (var error : errors) { - scope.error("Observation strategy resource has errors: " + strategyUrl, - Klab.ErrorCode.RESOURCE_VALIDATION, Klab.ErrorContext.OBSERVATION_STRATEGY); - } - return null; - } - - if (result instanceof Strategies strategies) { - return new ObservationStrategiesSyntaxImpl(strategies, languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.WARNING))); - } - - @Override - protected void logError(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.ERROR))); - } - }; - } - } catch (IOException e) { - scope.error("Error loading observation strategy " + strategyUrl, Klab.ErrorCode.READ_FAILED - , Klab.ErrorContext.OBSERVATION_STRATEGY); - } - return null; + return new WorldviewStandaloneSetup().createInjectorAndDoEMFRegistration(); } - } - - class ObservableParser extends Parser { - - @Inject - ObservableGrammarAccess grammarAccess; + }; + private Parser namespaceParser = + new Parser() { @Override protected Injector createInjector() { - return new ObservableStandaloneSetup().createInjectorAndDoEMFRegistration(); + return new KimStandaloneSetup().createInjectorAndDoEMFRegistration(); + } + }; + + private class ProjectDescriptor { + String name; + String workspace; + ProjectStorage storage; + Project externalProject; + Project.Manifest manifest; + int updateInterval; + } + + private Map workspaces = new LinkedHashMap<>(); + private final Function externalProjectResolver; + private Map projectDescriptors = new HashMap<>(); + private Map projects = new LinkedHashMap<>(); + // all logging goes through here + private Scope scope; + private ResourcesConfiguration configuration; + private Map lastProjectUpdates = new HashMap<>(); + private List> unresolvedProjects = new ArrayList<>(); + + // TODO fix the API - just pass the service, get options and scope from it like the kbox + public WorkspaceManager( + Scope scope, + ServiceStartupOptions options, + ResourcesProvider service, + Function externalProjectResolver) { + this.service = service; + this.externalProjectResolver = externalProjectResolver; + this.scope = scope; + this.startupOptions = options; + readConfiguration(options); + loadWorkspace(); + scheduler.scheduleAtFixedRate(() -> checkForProjectUpdates(), 1, 1, TimeUnit.MINUTES); + } + + private void checkForProjectUpdates() { + + synchronized (projectDescriptors) { + for (var pd : projectDescriptors.values()) { + // configured interval == 0 disables update + if (pd.storage instanceof FileProjectStorage fpd + && !fpd.isLocked() + && pd.updateInterval > 0) { + var now = System.currentTimeMillis(); + var timeToUpdate = + lastProjectUpdates.containsKey(pd.name) + ? 
lastProjectUpdates.get(pd.name) + ((long) pd.updateInterval * 1000 * 60) + : now; + if (timeToUpdate <= now) { + Thread.ofVirtual().start(() -> checkForProjectUpdates(pd)); + lastProjectUpdates.put(pd.name, now); + } } + } + } + } + + private void checkForProjectUpdates(ProjectDescriptor projectDescriptor) { + // TODO fetch changes and react as configured; if anything must be reloaded, lock the workspace + scope.info( + "TODO - Checking for updates in unlocked project " + + projectDescriptor.name + + ", " + + "scheduled each " + + projectDescriptor.updateInterval + + " minutes"); + } + + private void readConfiguration(ServiceStartupOptions options) { + + File config = BaseService.getFileInConfigurationDirectory(options, "resources.yaml"); + if (config.exists() && config.length() > 0 && !options.isClean()) { + this.configuration = + org.integratedmodelling.common.utils.Utils.YAML.load( + config, ResourcesConfiguration.class); + } else { + // make an empty config + this.configuration = new ResourcesConfiguration(); + this.configuration.setServicePath("resources"); + this.configuration.setLocalResourcePath("local"); + this.configuration.setPublicResourcePath("public"); + this.configuration.setServiceId(UUID.randomUUID().toString()); + saveConfiguration(); + } + + // clear existing caches (this must be reentrant and be callable again at any new import) + projectDescriptors.clear(); + + // build descriptors for all locally configured projects and workspaces + + for (var workspace : configuration.getWorkspaces().keySet()) { + + // ensure existing + if (!this.workspaces.containsKey(workspace)) { + var ws = new WorkspaceImpl(); + ws.setUrn(workspace); + this.workspaces.put(workspace, ws); + } + + // TODO must read all worldview providing projects first + + for (var projectName : configuration.getWorkspaces().get(workspace)) { + + var projectConfiguration = configuration.getProjectConfiguration().get(projectName); + var storage = + switch (projectConfiguration.getStorageType()) { + case FILE -> + new FileProjectStorage( + projectConfiguration.getLocalPath(), projectName, this::handleFileChange); + // TODO others + default -> { + scope.error( + "Project " + + projectName + + " cannot be loaded. Configuration is " + + "invalid" + + "."); + yield null; + } + }; - /** - * Parse a concept definition into its syntactic peer, which should be inspected for errors before - * turning into semantics. - * - * @param conceptDefinition - * @return the parsed semantic expression, or null if the parser cannot make sense of it. 
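The project update check above polls once a minute and only does real work when a project's own interval has elapsed, pushing the slow part onto a virtual thread. A standalone sketch of that scheduling pattern (project names, intervals and the checkProject body are illustrative):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class PeriodicProjectCheck {

  private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
  private final Map<String, Long> lastRun = new ConcurrentHashMap<>();
  private final Map<String, Integer> intervalMinutes = Map.of("projectA", 15, "projectB", 5);

  void start() {
    // poll every minute; each project decides whether its own interval has elapsed
    scheduler.scheduleAtFixedRate(this::tick, 1, 1, TimeUnit.MINUTES);
  }

  private void tick() {
    long now = System.currentTimeMillis();
    intervalMinutes.forEach((project, minutes) -> {
      if (minutes <= 0) {
        return; // a zero or negative interval disables updates, as in the configuration
      }
      long due = lastRun.getOrDefault(project, 0L) + minutes * 60_000L;
      if (due <= now) {
        lastRun.put(project, now);
        Thread.ofVirtual().start(() -> checkProject(project)); // keep the scheduler thread free
      }
    });
  }

  private void checkProject(String project) {
    // fetch remote changes and reload affected documents (elided)
  }
}
```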
- */ - public SemanticSyntax parseConcept(String conceptDefinition) { - var result = parser.parse(grammarAccess.getConceptExpressionRule(), - new StringReader(conceptDefinition)); - var ret = result.getRootASTElement(); - if (ret instanceof ConceptExpression) { - return new SemanticSyntaxImpl((ConceptExpression) ret, false, null, languageValidationScope) { - - List errors = new ArrayList<>(); - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.WARNING))); - } - - @Override - protected void logError(ParsedObject target, EObject object, EStructuralFeature feature - , String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.ERROR))); - } - }; + // TODO put this outside the workspace loop after checking for worldviews and sorting + if (storage != null) { + + ProjectDescriptor descriptor = new ProjectDescriptor(); + descriptor.storage = storage; + descriptor.manifest = readManifest(storage); + descriptor.workspace = workspace; + descriptor.name = storage.getProjectName(); + descriptor.updateInterval = projectConfiguration.getSyncIntervalMinutes(); + projectDescriptors.put(storage.getProjectName(), descriptor); + } + } + } + } + + /** + * Return all ontologies sorted in order of dependency. Automatically adapt the local ones from + * their syntactic form. Project dependencies will ensure the consistency of the result; if any of + * the ontologies is part of a missing project, return an empty list. + * + * @param worldviewOnly if true, only ontologies that are part of a project tagged as worldview + * will be returned + * @return the fully consistent known worldview or an empty list + */ + public List getOntologies(boolean worldviewOnly) { + + if (_ontologyOrder == null) { + + _worldviewOntologies = new ArrayList<>(); + _ontologyOrder = new ArrayList<>(); + _ontologyMap = new HashMap<>(); + + this.languageValidationScope = new WorldviewValidationScope(); + + Map ontologyProjects = new HashMap<>(); + Map> cache = new HashMap<>(); + Map urlCache = new HashMap<>(); + for (var pd : projectDescriptors.values()) { + var isWorldview = pd.manifest.getDefinedWorldview() != null; + if (pd.externalProject != null) { + for (var ontology : pd.externalProject.getOntologies()) { + cache.put(ontology.getUrn(), Triple.of(null, ontology, isWorldview)); + // TODO add metadata to the ontology to signify it's remote, probably a URL + } + } else { + for (var ontologyUrl : pd.storage.listResources(ProjectStorage.ResourceType.ONTOLOGY)) { + try (var input = ontologyUrl.openStream()) { + var errors = new ArrayList(); + var parsed = ontologyParser.parse(input, errors); + if (!errors.isEmpty()) { + scope.error( + "Ontology resource has errors: " + ontologyUrl, + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.ONTOLOGY); + // return Collections.emptyList(); + } + urlCache.put(parsed.getNamespace().getName(), ontologyUrl); + ontologyProjects.put(parsed.getNamespace().getName(), pd.name); + cache.put(parsed.getNamespace().getName(), Triple.of(parsed, null, isWorldview)); + } catch (IOException e) { + // log error and return failure + scope.error( + "Error loading ontology " + ontologyUrl, + Klab.ErrorCode.READ_FAILED, + Klab.ErrorContext.ONTOLOGY); + // return Collections.emptyList(); } - return null; 
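// readConfiguration() above loads resources.yaml when it exists (and --clean was not given) and
// otherwise writes a fresh default configuration. A load-or-create sketch of that behavior;
// Jackson's YAML mapper and the Config stand-in are assumptions, the real code goes through the
// project's own Utils.YAML helper and ResourcesConfiguration.
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.UUID;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;

class ConfigSketch {

  private static final ObjectMapper YAML = new ObjectMapper(new YAMLFactory());

  /** Hypothetical minimal stand-in for ResourcesConfiguration. */
  public static class Config {
    public String servicePath = "resources";
    public String serviceId;
  }

  static Config loadOrCreate(File file, boolean clean) {
    try {
      if (file.exists() && file.length() > 0 && !clean) {
        return YAML.readValue(file, Config.class);
      }
      Config defaults = new Config();
      defaults.serviceId = UUID.randomUUID().toString();
      YAML.writeValue(file, defaults); // persist immediately, like saveConfiguration()
      return defaults;
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
}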
+ } } - - /** - * Parse an observable definition into its syntactic peer, which should be inspected for errors before - * turning into semantics. - * - * @param observableDefinition - * @return the parsed semantic expression, or null if the parser cannot make sense of it. - */ - public ObservableSyntax parseObservable(String observableDefinition) { - var result = parser.parse(grammarAccess.getObservableSemanticsRule(), - new StringReader(observableDefinition)); - var ret = result.getRootASTElement(); - if (ret instanceof ObservableSemantics) { - return new ObservableSyntaxImpl((ObservableSemantics) ret, languageValidationScope) { - - List errors = new ArrayList<>(); - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.WARNING))); - } - - @Override - protected void logError(ParsedObject target, EObject object, EStructuralFeature feature - , String message) { - getNotifications().add(new Notification(object, - new LanguageValidationScope.ValidationMessage(message, -1, - LanguageValidationScope.Level.ERROR))); - } - }; - } - return null; + } + + // we have the ontologies and there are no errors this far: now build the order and if + // something is unresolved, log error and say goodbye + Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); + Map ontologies = new HashMap<>(); + for (String ontologyId : cache.keySet()) { + var od = cache.get(ontologyId); + dependencyGraph.addVertex(ontologyId); + if (od.getFirst() != null) { + for (var imported : od.getFirst().getNamespace().getImported()) { + dependencyGraph.addVertex(imported); + dependencyGraph.addEdge(imported, ontologyId); + } + } else { + for (var imported : od.getSecond().getImportedOntologies()) { + dependencyGraph.addVertex(imported); + dependencyGraph.addEdge(imported, ontologyId); + } } + } + + CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); + if (cycleDetector.detectCycles()) { + scope.error( + "Circular dependencies in ontology graph: cannot continue", + Klab.ErrorCode.CIRCULAR_REFERENCES, + Klab.ErrorContext.ONTOLOGY); + return Collections.emptyList(); + } + + // finish building the ontologies in the given order using a new language validator + TopologicalOrderIterator sort = + new TopologicalOrderIterator<>(dependencyGraph); + while (sort.hasNext()) { + var ontologyId = sort.next(); + var od = cache.get(ontologyId); + if (od == null) { + scope.error( + "Ontology " + + ontologyId + + " cannot be resolved either locally or " + + "through" + + " the network", + Klab.ErrorCode.UNRESOLVED_REFERENCE, + Klab.ErrorContext.ONTOLOGY); + return Collections.emptyList(); + } + AtomicBoolean errors = new AtomicBoolean(false); + List notifications = new ArrayList<>(); + var ontology = od.getSecond(); + if (ontology == null) { + var syntax = + new OntologySyntaxImpl(od.getFirst(), languageValidationScope) { - } - - private ObservableParser observableParser = new ObservableParser(); - private StrategyParser strategyParser = new StrategyParser(); - - private Parser ontologyParser = new Parser() { - @Override - protected Injector createInjector() { - return new WorldviewStandaloneSetup().createInjectorAndDoEMFRegistration(); - } - }; + @Override + protected void logWarning( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + 
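// The ordering idiom used by getOntologies() (and again for namespaces below): add an edge from
// each imported document to its importer, refuse to continue on cycles, then iterate in
// topological order so every dependency is processed before its dependents. Shown here on plain
// strings with the same JGraphT types as above.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jgrapht.Graph;
import org.jgrapht.alg.cycle.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

class DependencyOrderSketch {

  /** Returns the ids in dependency order, or an empty list when the imports are circular. */
  static List<String> order(Map<String, Set<String>> importsById) {
    Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);
    importsById.forEach(
        (id, imports) -> {
          graph.addVertex(id);
          for (String imported : imports) {
            graph.addVertex(imported);
            graph.addEdge(imported, id); // imported -> importer
          }
        });
    if (new CycleDetector<>(graph).detectCycles()) {
      return List.of(); // mirrors the "circular dependencies: cannot continue" branch
    }
    List<String> sorted = new ArrayList<>();
    new TopologicalOrderIterator<>(graph).forEachRemaining(sorted::add);
    return sorted;
  }
}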
makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); + } - private Parser namespaceParser = new Parser() { - @Override - protected Injector createInjector() { - return new KimStandaloneSetup().createInjectorAndDoEMFRegistration(); - } - }; - - private class ProjectDescriptor { - String name; - String workspace; - ProjectStorage storage; - Project externalProject; - Project.Manifest manifest; - int updateInterval; - } - - private Map workspaces = new LinkedHashMap<>(); - private final Function externalProjectResolver; - private Map projectDescriptors = new HashMap<>(); - private Map projects = new LinkedHashMap<>(); - // all logging goes through here - private Scope scope; - private ResourcesConfiguration configuration; - private Map lastProjectUpdates = new HashMap<>(); - private List> unresolvedProjects = new ArrayList<>(); - - // TODO fix the API - just pass the service, get options and scope from it like the kbox - public WorkspaceManager(Scope scope, ServiceStartupOptions options, ResourcesProvider service, - Function externalProjectResolver) { - this.service = service; - this.externalProjectResolver = externalProjectResolver; - this.scope = scope; - this.startupOptions = options; - readConfiguration(options); - loadWorkspace(); - scheduler.scheduleAtFixedRate(() -> checkForProjectUpdates(), 1, 1, TimeUnit.MINUTES); - } - - private void checkForProjectUpdates() { - - synchronized (projectDescriptors) { - for (var pd : projectDescriptors.values()) { - // configured interval == 0 disables update - if (pd.storage instanceof FileProjectStorage fpd && !fpd.isLocked() && pd.updateInterval > 0) { - var now = System.currentTimeMillis(); - var timeToUpdate = lastProjectUpdates.containsKey(pd.name) ? - lastProjectUpdates.get(pd.name) + ((long) pd.updateInterval * 1000 * 60) : now; - if (timeToUpdate <= now) { - Thread.ofVirtual().start(() -> checkForProjectUpdates(pd)); - lastProjectUpdates.put(pd.name, now); - } + @Override + protected void logError( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Error)); + errors.set(true); } + }; + ontology = + LanguageAdapter.INSTANCE.adaptOntology( + syntax, ontologyProjects.get(syntax.getName()), notifications); + documentURLs.put(ontology.getUrn(), urlCache.get(ontology.getUrn())); + } + + if (errors.get()) { + scope.error( + "Logical errors in ontology " + ontologyId + ": cannot continue", + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.ONTOLOGY); + // return Collections.emptyList(); + } + + languageValidationScope.addNamespace(ontology); + + this._ontologyOrder.add(ontology); + this._ontologyMap.put(ontology.getUrn(), ontology); + if (od.getThird()) { + this._worldviewOntologies.add(ontology); + } + } + } + + return worldviewOnly ? _worldviewOntologies : _ontologyOrder; + } + + /** + * Return all the namespaces in order of dependency. Resolution is internal like in {@link + * #getOntologies(boolean)}. 
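// Every anonymous *SyntaxImpl subclass above overrides logWarning/logError so validator callbacks
// are collected as notifications and flip an error flag instead of aborting the parse. The same
// collect-don't-throw pattern reduced to plain Java; the Validator base and Message record are
// stand-ins, not k.LAB types.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

class ValidationCollectorSketch {

  enum Level {
    WARNING,
    ERROR
  }

  record Message(Level level, String text) {}

  /** Hypothetical validator base: subclasses decide what to do with reported problems. */
  abstract static class Validator {
    protected abstract void logWarning(String message);

    protected abstract void logError(String message);
  }

  static void example() {
    List<Message> notifications = new ArrayList<>();
    AtomicBoolean errors = new AtomicBoolean(false);

    Validator collecting =
        new Validator() {
          @Override
          protected void logWarning(String message) {
            notifications.add(new Message(Level.WARNING, message));
          }

          @Override
          protected void logError(String message) {
            notifications.add(new Message(Level.ERROR, message));
            errors.set(true); // checked by the caller before adapting the document
          }
        };

    collecting.logError("example problem");
    if (errors.get()) {
      // skip adaptation and report the notifications, as getOntologies() does above
    }
  }
}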
+ * + * @return + */ + public List getNamespaces() { + + if (_namespaceOrder == null) { + _namespaceOrder = new ArrayList<>(); + _namespaceMap = new HashMap<>(); + + Map kimProjects = new HashMap<>(); + Map> cache = new HashMap<>(); + Map urlCache = new HashMap<>(); + for (var pd : projectDescriptors.values()) { + if (pd.externalProject == null) { + for (var namespaceUrl : + pd.storage.listResources(ProjectStorage.ResourceType.MODEL_NAMESPACE)) { + try (var input = namespaceUrl.openStream()) { + var errors = new ArrayList(); + var parsed = namespaceParser.parse(input, errors); + if (!errors.isEmpty()) { + scope.error( + "Namespace resource has errors: " + namespaceUrl, + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.NAMESPACE); + // return Collections.emptyList(); + } + urlCache.put(parsed.getNamespace().getName(), namespaceUrl); + kimProjects.put(parsed.getNamespace().getName(), pd.name); + cache.put(parsed.getNamespace().getName(), Pair.of(parsed, null)); + } catch (IOException e) { + // log error and return failure + scope.error( + "Error loading namespace " + namespaceUrl, + Klab.ErrorCode.READ_FAILED, + Klab.ErrorContext.NAMESPACE); + // return Collections.emptyList(); } + } } - } - - private void checkForProjectUpdates(ProjectDescriptor projectDescriptor) { - // TODO fetch changes and react as configured; if anything must be reloaded, lock the workspace - scope.info("TODO - Checking for updates in unlocked project " + projectDescriptor.name + ", " + - "scheduled each " + projectDescriptor.updateInterval + " minutes"); - } - - private void readConfiguration(ServiceStartupOptions options) { - - File config = BaseService.getFileInConfigurationDirectory(options, "resources.yaml"); - if (config.exists() && config.length() > 0 && !options.isClean()) { - this.configuration = org.integratedmodelling.common.utils.Utils.YAML.load(config, - ResourcesConfiguration.class); + } + + // we have the ontologies and there are no errors this far: now build the order and if + // something is unresolved, log error and say goodbye + Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); + Map ontologies = new HashMap<>(); + for (String namespaceId : cache.keySet()) { + var od = cache.get(namespaceId); + dependencyGraph.addVertex(namespaceId); + if (od.getFirst() != null) { + for (var imported : od.getFirst().getNamespace().getImported()) { + dependencyGraph.addVertex(imported.getName()); + dependencyGraph.addEdge(imported.getName(), namespaceId); + } } else { - // make an empty config - this.configuration = new ResourcesConfiguration(); - this.configuration.setServicePath("resources"); - this.configuration.setLocalResourcePath("local"); - this.configuration.setPublicResourcePath("public"); - this.configuration.setServiceId(UUID.randomUUID().toString()); - saveConfiguration(); - } - - // clear existing caches (this must be reentrant and be callable again at any new import) - projectDescriptors.clear(); - - // build descriptors for all locally configured projects and workspaces - - for (var workspace : configuration.getWorkspaces().keySet()) { - - // ensure existing - if (!this.workspaces.containsKey(workspace)) { - var ws = new WorkspaceImpl(); - ws.setUrn(workspace); - this.workspaces.put(workspace, ws); - } - - // TODO must read all worldview providing projects first - - for (var projectName : configuration.getWorkspaces().get(workspace)) { - - var projectConfiguration = configuration.getProjectConfiguration().get(projectName); - var storage = switch 
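// getOntologies() and getNamespaces() both scan the URLs returned by
// ProjectStorage.listResources(), open each with try-with-resources, and keep going when a single
// resource fails so one broken file does not hide the rest. The loop in isolation, with a generic
// parser callback standing in for the Xtext-based parsers.
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

class ResourceLoadSketch {

  /** Parses each URL; failures are reported and skipped rather than aborting the whole scan. */
  static <T> Map<URL, T> loadAll(
      List<URL> resources, Function<InputStream, T> parser, List<String> problems) {
    Map<URL, T> parsed = new LinkedHashMap<>();
    for (URL url : resources) {
      try (InputStream input = url.openStream()) {
        parsed.put(url, parser.apply(input));
      } catch (IOException e) {
        problems.add("Error loading " + url + ": " + e.getMessage());
      }
    }
    return parsed;
  }
}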
(projectConfiguration.getStorageType()) { - case FILE -> new FileProjectStorage(projectConfiguration.getLocalPath(), projectName, - this::handleFileChange); - // TODO others - default -> { - scope.error("Project " + projectName + " cannot be loaded. Configuration is " + - "invalid" + "."); - yield null; - } - }; - - // TODO put this outside the workspace loop after checking for worldviews and sorting - if (storage != null) { - - ProjectDescriptor descriptor = new ProjectDescriptor(); - descriptor.storage = storage; - descriptor.manifest = readManifest(storage); - descriptor.workspace = workspace; - descriptor.name = storage.getProjectName(); - descriptor.updateInterval = projectConfiguration.getSyncIntervalMinutes(); - projectDescriptors.put(storage.getProjectName(), descriptor); - - } - - } + for (var imported : od.getSecond().getImports().keySet()) { + dependencyGraph.addVertex(imported); + dependencyGraph.addEdge(imported, namespaceId); + } } - } - - /** - * Return all ontologies sorted in order of dependency. Automatically adapt the local ones from their - * syntactic form. Project dependencies will ensure the consistency of the result; if any of the - * ontologies is part of a missing project, return an empty list. - * - * @param worldviewOnly if true, only ontologies that are part of a project tagged as worldview will be - * returned - * @return the fully consistent known worldview or an empty list - */ - public List getOntologies(boolean worldviewOnly) { - - if (_ontologyOrder == null) { - - _worldviewOntologies = new ArrayList<>(); - _ontologyOrder = new ArrayList<>(); - _ontologyMap = new HashMap<>(); - - this.languageValidationScope = new WorldviewValidationScope(); - - Map ontologyProjects = new HashMap<>(); - Map> cache = new HashMap<>(); - Map urlCache = new HashMap<>(); - for (var pd : projectDescriptors.values()) { - var isWorldview = pd.manifest.getDefinedWorldview() != null; - if (pd.externalProject != null) { - for (var ontology : pd.externalProject.getOntologies()) { - cache.put(ontology.getUrn(), Triple.of(null, ontology, isWorldview)); - // TODO add metadata to the ontology to signify it's remote, probably a URL - } - } else { - for (var ontologyUrl : pd.storage.listResources(ProjectStorage.ResourceType.ONTOLOGY)) { - try (var input = ontologyUrl.openStream()) { - var errors = new ArrayList(); - var parsed = ontologyParser.parse(input, errors); - if (!errors.isEmpty()) { - scope.error("Ontology resource has errors: " + ontologyUrl, - Klab.ErrorCode.RESOURCE_VALIDATION, Klab.ErrorContext.ONTOLOGY); - // return Collections.emptyList(); - } - urlCache.put(parsed.getNamespace().getName(), ontologyUrl); - ontologyProjects.put(parsed.getNamespace().getName(), pd.name); - cache.put(parsed.getNamespace().getName(), Triple.of(parsed, null, isWorldview)); - } catch (IOException e) { - // log error and return failure - scope.error("Error loading ontology " + ontologyUrl, Klab.ErrorCode.READ_FAILED - , Klab.ErrorContext.ONTOLOGY); - // return Collections.emptyList(); - } - } - } - } - - // we have the ontologies and there are no errors this far: now build the order and if - // something is unresolved, log error and say goodbye - Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); - Map ontologies = new HashMap<>(); - for (String ontologyId : cache.keySet()) { - var od = cache.get(ontologyId); - dependencyGraph.addVertex(ontologyId); - if (od.getFirst() != null) { - for (var imported : od.getFirst().getNamespace().getImported()) { - 
dependencyGraph.addVertex(imported); - dependencyGraph.addEdge(imported, ontologyId); - } - } else { - for (var imported : od.getSecond().getImportedOntologies()) { - dependencyGraph.addVertex(imported); - dependencyGraph.addEdge(imported, ontologyId); - } - } - } - - CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); - if (cycleDetector.detectCycles()) { - scope.error("Circular dependencies in ontology graph: cannot continue", - Klab.ErrorCode.CIRCULAR_REFERENCES, Klab.ErrorContext.ONTOLOGY); - return Collections.emptyList(); - } - - - // finish building the ontologies in the given order using a new language validator - TopologicalOrderIterator sort = - new TopologicalOrderIterator<>(dependencyGraph); - while (sort.hasNext()) { - var ontologyId = sort.next(); - var od = cache.get(ontologyId); - if (od == null) { - scope.error("Ontology " + ontologyId + " cannot be resolved either locally or " + - "through" + " the network", Klab.ErrorCode.UNRESOLVED_REFERENCE, - Klab.ErrorContext.ONTOLOGY); - return Collections.emptyList(); - } - AtomicBoolean errors = new AtomicBoolean(false); - List notifications = new ArrayList<>(); - var ontology = od.getSecond(); - if (ontology == null) { - var syntax = new OntologySyntaxImpl(od.getFirst(), languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - - } - - @Override - protected void logError(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - errors.set(true); - } - }; - ontology = LanguageAdapter.INSTANCE.adaptOntology(syntax, - ontologyProjects.get(syntax.getName()), notifications); - documentURLs.put(ontology.getUrn(), urlCache.get(ontology.getUrn())); - } + } + + CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); + if (cycleDetector.detectCycles()) { + scope.error( + "Circular dependencies in namespace graph: cannot continue", + Klab.ErrorCode.CIRCULAR_REFERENCES, + Klab.ErrorContext.NAMESPACE); + return Collections.emptyList(); + } + + // finish building the ontologies in the given order using a new language validator + TopologicalOrderIterator sort = + new TopologicalOrderIterator<>(dependencyGraph); + while (sort.hasNext()) { + var namespaceId = sort.next(); + var od = cache.get(namespaceId); + if (od == null) { + scope.error( + "Namespace " + + namespaceId + + " cannot be resolved either locally or " + + "through" + + " the network", + Klab.ErrorCode.UNRESOLVED_REFERENCE, + Klab.ErrorContext.ONTOLOGY); + return Collections.emptyList(); + } + AtomicBoolean errors = new AtomicBoolean(false); + List notifications = new ArrayList<>(); + var namespace = od.getSecond(); + if (namespace == null) { + var syntax = + new NamespaceSyntaxImpl(od.getFirst(), this.languageValidationScope) { - if (errors.get()) { - scope.error("Logical errors in ontology " + ontologyId + ": cannot continue", - Klab.ErrorCode.RESOURCE_VALIDATION, Klab.ErrorContext.ONTOLOGY); - // return Collections.emptyList(); + @Override + protected void logWarning( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + makeNotification( + target, + object, + feature, + 
message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); } - languageValidationScope.addNamespace(ontology); - - this._ontologyOrder.add(ontology); - this._ontologyMap.put(ontology.getUrn(), ontology); - if (od.getThird()) { - this._worldviewOntologies.add(ontology); + @Override + protected void logError( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Error)); + errors.set(true); } - } + }; + namespace = + LanguageAdapter.INSTANCE.adaptNamespace( + syntax, kimProjects.get(syntax.getUrn()), notifications); + documentURLs.put(namespace.getUrn(), urlCache.get(namespace.getUrn())); } - return worldviewOnly ? _worldviewOntologies : _ontologyOrder; - } - - /** - * Return all the namespaces in order of dependency. Resolution is internal like in - * {@link #getOntologies(boolean)}. - * - * @return - */ - public List getNamespaces() { - - if (_namespaceOrder == null) { - _namespaceOrder = new ArrayList<>(); - _namespaceMap = new HashMap<>(); - - Map kimProjects = new HashMap<>(); - Map> cache = new HashMap<>(); - Map urlCache = new HashMap<>(); - for (var pd : projectDescriptors.values()) { - if (pd.externalProject == null) { - for (var namespaceUrl : - pd.storage.listResources(ProjectStorage.ResourceType.MODEL_NAMESPACE)) { - try (var input = namespaceUrl.openStream()) { - var errors = new ArrayList(); - var parsed = namespaceParser.parse(input, errors); - if (!errors.isEmpty()) { - scope.error("Namespace resource has errors: " + namespaceUrl, - Klab.ErrorCode.RESOURCE_VALIDATION, Klab.ErrorContext.NAMESPACE); - // return Collections.emptyList(); - } - urlCache.put(parsed.getNamespace().getName(), namespaceUrl); - kimProjects.put(parsed.getNamespace().getName(), pd.name); - cache.put(parsed.getNamespace().getName(), Pair.of(parsed, null)); - } catch (IOException e) { - // log error and return failure - scope.error("Error loading namespace " + namespaceUrl, - Klab.ErrorCode.READ_FAILED, Klab.ErrorContext.NAMESPACE); - // return Collections.emptyList(); - } - } - } - } - - // we have the ontologies and there are no errors this far: now build the order and if - // something is unresolved, log error and say goodbye - Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); - Map ontologies = new HashMap<>(); - for (String namespaceId : cache.keySet()) { - var od = cache.get(namespaceId); - dependencyGraph.addVertex(namespaceId); - if (od.getFirst() != null) { - for (var imported : od.getFirst().getNamespace().getImported()) { - dependencyGraph.addVertex(imported.getName()); - dependencyGraph.addEdge(imported.getName(), namespaceId); - } - } else { - for (var imported : od.getSecond().getImports().keySet()) { - dependencyGraph.addVertex(imported); - dependencyGraph.addEdge(imported, namespaceId); - } - } - } - - CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); - if (cycleDetector.detectCycles()) { - scope.error("Circular dependencies in namespace graph: cannot continue", - Klab.ErrorCode.CIRCULAR_REFERENCES, Klab.ErrorContext.NAMESPACE); - return Collections.emptyList(); - } - - - // finish building the ontologies in the given order using a new language validator - TopologicalOrderIterator sort = - new TopologicalOrderIterator<>(dependencyGraph); - while (sort.hasNext()) { - var namespaceId = sort.next(); - var od = 
cache.get(namespaceId); - if (od == null) { - scope.error("Namespace " + namespaceId + " cannot be resolved either locally or " + - "through" + " the network", Klab.ErrorCode.UNRESOLVED_REFERENCE, - Klab.ErrorContext.ONTOLOGY); - return Collections.emptyList(); - } - AtomicBoolean errors = new AtomicBoolean(false); - List notifications = new ArrayList<>(); - var namespace = od.getSecond(); - if (namespace == null) { - var syntax = new NamespaceSyntaxImpl(od.getFirst(), this.languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - - } - - @Override - protected void logError(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - errors.set(true); - } - }; - namespace = LanguageAdapter.INSTANCE.adaptNamespace(syntax, - kimProjects.get(syntax.getUrn()), notifications); - documentURLs.put(namespace.getUrn(), urlCache.get(namespace.getUrn())); - } - - if (errors.get()) { - scope.error("Logical errors in namespace " + namespaceId + ": cannot continue", - Klab.ErrorCode.RESOURCE_VALIDATION, Klab.ErrorContext.ONTOLOGY); - // return Collections.emptyList(); - } - - this._namespaceOrder.add(namespace); - this._namespaceMap.put(namespace.getUrn(), namespace); - } + if (errors.get()) { + scope.error( + "Logical errors in namespace " + namespaceId + ": cannot continue", + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.ONTOLOGY); + // return Collections.emptyList(); } - return _namespaceOrder; - } - public ResourcesConfiguration getConfiguration() { - return this.configuration; + this._namespaceOrder.add(namespace); + this._namespaceMap.put(namespace.getUrn(), namespace); + } } + return _namespaceOrder; + } - public List getBehaviors() { - if (_behaviorOrder == null) { - _behaviorOrder = new ArrayList<>(); - _behaviorMap = new HashMap<>(); - // TODO load them from all projects in dependency order, same as ontologies; fill in the URL - // cache and everything - } - return _behaviorOrder; - } - - public List getStrategyDocuments() { - - if (_observationStrategyDocuments == null) { - _observationStrategyDocuments = new ArrayList<>(); - _observationStrategyDocumentMap = new HashMap<>(); - - for (var pd : projectDescriptors.values()) { - if (pd.externalProject == null) { - - for (var strategyUrl : pd.storage.listResources(ProjectStorage.ResourceType.STRATEGY)) { - try (var input = strategyUrl.openStream()) { - - var errors = new AtomicBoolean(false); - var notams = new ArrayList(); - var parsed = strategyParser.parse(input, notams); - - if (!notams.isEmpty()) { - scope.error("Observation strategy resource has errors: " + strategyUrl, - Klab.ErrorCode.RESOURCE_VALIDATION, - Klab.ErrorContext.OBSERVATION_STRATEGY); - // return Collections.emptyList(); - } else { - - List notifications = new ArrayList<>(); - var syntax = new ObservationStrategiesSyntaxImpl(parsed, - this.languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message - , - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - 
- } - - @Override - protected void logError(ParsedObject target, EObject object, - EStructuralFeature feature, String message) { - notifications.add(makeNotification(target, object, feature, message - , - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - errors.set(true); - } - }; - - if (!errors.get()) { - var document = LanguageAdapter.INSTANCE.adaptStrategies(syntax, pd.name - , notifications); - _observationStrategyDocuments.add(document); - _observationStrategyDocumentMap.put(document.getUrn(), document); - } - } - } catch (IOException e) { - // log error and return failure - scope.error("Error loading ontology " + strategyUrl, Klab.ErrorCode.READ_FAILED - , Klab.ErrorContext.ONTOLOGY); - } - } - } - } - } - return _observationStrategyDocuments; - } + public ResourcesConfiguration getConfiguration() { + return this.configuration; + } - public List getWorkspaceURNs() { - return new ArrayList<>(workspaces.keySet()); + public List getBehaviors() { + if (_behaviorOrder == null) { + _behaviorOrder = new ArrayList<>(); + _behaviorMap = new HashMap<>(); + // TODO load them from all projects in dependency order, same as ontologies; fill in the URL + // cache and everything } + return _behaviorOrder; + } - /** - * Create the project implementation with every namespace and manifest filled in. CAUTION this can be a - * large object. The project must exist in a local workspace; if not, null will be returned without - * error. - * - * @param projectId - * @return the filled in project or null - */ - public Project createProjectData(String projectId, String workspaceName) { + public List getStrategyDocuments() { - ProjectImpl ret = null; - var pdesc = projectDescriptors.get(projectId); - if (pdesc != null && pdesc.storage != null) { - - ret = new ProjectImpl(); - ret.setUrn(projectId); + if (_observationStrategyDocuments == null) { + _observationStrategyDocuments = new ArrayList<>(); + _observationStrategyDocumentMap = new HashMap<>(); - // TODO improve metadata with service IDs, load time, stats, any info etc. 
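// getOntologies(), getNamespaces(), getBehaviors() and getStrategyDocuments() all build their
// lists lazily on first use and then serve the cached order until the caches are reset. The idiom
// in isolation, with plain strings standing in for the document types.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class LazyDocumentsSketch {

  private List<String> order; // stands in for _behaviorOrder and friends
  private Map<String, String> byUrn; // stands in for _behaviorMap and friends

  /** Builds once, then returns the cached order; setting order back to null forces a rebuild. */
  public synchronized List<String> getDocuments() {
    if (order == null) {
      order = new ArrayList<>();
      byUrn = new HashMap<>();
      for (String urn : discover()) { // placeholder for the project scan above
        order.add(urn);
        byUrn.put(urn, urn);
      }
    }
    return order;
  }

  private List<String> discover() {
    return List.of();
  }
}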
- // TODO should only add a file:/ URL if the project is local to the requester (check scope) - ret.getMetadata().put(Metadata.RESOURCES_STORAGE_URL, pdesc.storage.getUrl()); - ret.setManifest(pdesc.manifest); + for (var pd : projectDescriptors.values()) { + if (pd.externalProject == null) { - for (KimOntology ontology : getOntologies(false)) { - if (projectId.equals(ontology.getProjectName())) { - ret.getOntologies().add(ontology); - } - } + for (var strategyUrl : pd.storage.listResources(ProjectStorage.ResourceType.STRATEGY)) { + try (var input = strategyUrl.openStream()) { - for (KimObservationStrategyDocument strategyDocument : getStrategyDocuments()) { - if (projectId.equals(strategyDocument.getProjectName())) { - ret.getObservationStrategies().add(strategyDocument); - } - } + var errors = new AtomicBoolean(false); + var notams = new ArrayList(); + var parsed = strategyParser.parse(input, notams); - for (KimNamespace namespace : getNamespaces()) { - if (projectId.equals(namespace.getProjectName())) { - ret.getNamespaces().add(namespace); - } - } + if (!notams.isEmpty()) { + scope.error( + "Observation strategy resource has errors: " + strategyUrl, + Klab.ErrorCode.RESOURCE_VALIDATION, + Klab.ErrorContext.OBSERVATION_STRATEGY); + // return Collections.emptyList(); + } else { - // TODO the rest + List notifications = new ArrayList<>(); + var syntax = + new ObservationStrategiesSyntaxImpl(parsed, this.languageValidationScope) { + + @Override + protected void logWarning( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); + } + + @Override + protected void logError( + ParsedObject target, + EObject object, + EStructuralFeature feature, + String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Error)); + errors.set(true); + } + }; - for (KActorsBehavior behavior : getBehaviors()) { - if (projectId.equals(behavior.getProjectName())) { - // FIXME choose based on where they belong - ret.getBehaviors().add(behavior); + if (!errors.get()) { + var document = + LanguageAdapter.INSTANCE.adaptStrategies(syntax, pd.name, notifications); + _observationStrategyDocuments.add(document); + _observationStrategyDocumentMap.put(document.getUrn(), document); } + } + } catch (IOException e) { + // log error and return failure + scope.error( + "Error loading ontology " + strategyUrl, + Klab.ErrorCode.READ_FAILED, + Klab.ErrorContext.ONTOLOGY); } - - - this.projects.put(ret.getUrn(), ret); - var project = ret; - var workspace = getWorkspace(workspaceName); - - if (workspace.getProjects().stream().anyMatch(p -> p.getUrn().equals(project.getUrn()))) { - workspace.setProjects(workspace.getProjects().stream().map(p -> p.getUrn().equals(project.getUrn()) ? project : p).collect(toList())); - } else { - workspace.getProjects().add(ret); - } - - + } } - - return ret; + } } + return _observationStrategyDocuments; + } - List getStrategies() { - return null; - } - - /** - * Import a project from a URL into the given workspace and return the associated storage. Project - * configuration must not exist already, if so it is removed and rebuilt. All project implications in the - * workspace are resolved downstream. 
- * - * @param projectUrl - * @param workspaceName - * @return - */ - public ProjectStorage importProject(String projectUrl, String workspaceName) { + public List getWorkspaceURNs() { + return new ArrayList<>(workspaces.keySet()); + } + /** + * Create the project implementation with every namespace and manifest filled in. CAUTION this can + * be a large object. The project must exist in a local workspace; if not, null will be returned + * without error. + * + * @param projectId + * @return the filled in project or null + */ + public Project createProjectData(String projectId, String workspaceName) { - String projectName = Utils.URLs.getURLBaseName(projectUrl); - var configuration = this.configuration.getProjectConfiguration().get(projectName); - if (configuration != null) { - scope.warn("Configuration of imported project " + projectName + " exists already: " + "import " + - "will " + "rewrite it"); - } + ProjectImpl ret = null; + var pdesc = projectDescriptors.get(projectId); + if (pdesc != null && pdesc.storage != null) { - ProjectStorage ret = null; + ret = new ProjectImpl(); + ret.setUrn(projectId); - try { + // TODO improve metadata with service IDs, load time, stats, any info etc. + // TODO should only add a file:/ URL if the project is local to the requester (check scope) + ret.getMetadata().put(Metadata.RESOURCES_STORAGE_URL, pdesc.storage.getUrl()); + ret.setManifest(pdesc.manifest); - if (Utils.Git.isRemoteGitURL(projectUrl)) { + for (KimOntology ontology : getOntologies(false)) { + if (projectId.equals(ontology.getProjectName())) { + ret.getOntologies().add(ontology); + } + } - File workspace = BaseService.getConfigurationSubdirectory(startupOptions, "workspaces"); - File projectHome = new File(workspace + File.separator + projectName); + for (KimObservationStrategyDocument strategyDocument : getStrategyDocuments()) { + if (projectId.equals(strategyDocument.getProjectName())) { + ret.getObservationStrategies().add(strategyDocument); + } + } - if (projectHome.isDirectory()) { - scope.warn("Deleting and reimporting " + projectName + " from Git repository " + projectUrl); - } + for (KimNamespace namespace : getNamespaces()) { + if (projectId.equals(namespace.getProjectName())) { + ret.getNamespaces().add(namespace); + } + } - try { - projectName = Utils.Git.clone(projectUrl, workspace, true, scope); - if (projectHome.exists()) { - ret = new FileProjectStorage(projectHome, projectName, this::handleFileChange); - } - - } catch (Throwable t) { - // just make the return value null - if (projectHome.exists()) { - Utils.Files.deleteQuietly(projectHome); - } - } + // TODO the rest - } else if (projectUrl.startsWith("http")) { - - /* - * TODO - * - * Load from another service. These projects may be served as mirrors or just - * kept to meet dependencies, according to the 'served' bit in the - * configuration. The source of truth should remain the source code, hosted in a - * single place (the remote service); mechanisms should be in place to store the - * original server and check for changes and new versions. 
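// createProjectData() assembles the project by walking the already-ordered document lists and
// keeping the entries whose project name matches. The same filter as a small stream helper over a
// generic document type; only java.util is assumed.
import java.util.List;
import java.util.function.Function;
import static java.util.stream.Collectors.toList;

class ProjectAssemblySketch {

  /** Keeps the documents owned by projectId, preserving their dependency order. */
  static <T> List<T> ownedBy(
      String projectId, List<T> documents, Function<T, String> projectNameOf) {
    return documents.stream()
        .filter(d -> projectId.equals(projectNameOf.apply(d)))
        .collect(toList());
  }
}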
- */ - - } else if (projectUrl.startsWith("file:") || new File(projectUrl).isFile()) { - - var file = Utils.URLs.getFileForURL(projectUrl); - if (file.isDirectory()) { - ret = new FileProjectStorage(file, projectName, this::handleFileChange); - } else if (Utils.Files.JAVA_ARCHIVE_EXTENSIONS.contains(Utils.Files.getFileExtension(file))) { - // TODO ret = read from archive - } - } - } catch (Throwable t) { - scope.error(t); - } finally { - // service.setBusy(false); + for (KActorsBehavior behavior : getBehaviors()) { + if (projectId.equals(behavior.getProjectName())) { + // FIXME choose based on where they belong + ret.getBehaviors().add(behavior); } + } - /** - * (Re)Create configuration - */ - if (ret != null) { - - configuration = new ResourcesConfiguration.ProjectConfiguration(); - configuration.setSourceUrl(projectUrl); - configuration.setWorkspaceName(workspaceName); - configuration.setSyncIntervalMinutes(DEFAULT_GIT_SYNC_INTERVAL_MINUTES); - configuration.setStorageType(ret.getType()); - /* - * Default privileges are exclusive to the service, the API can be used to change them - */ - configuration.setPrivileges(ResourcePrivileges.empty()); - if (ret instanceof FileProjectStorage fps) { - configuration.setLocalPath(fps.getRootFolder()); - } - this.configuration.getProjectConfiguration().put(ret.getProjectName(), configuration); - configuration.setWorldview(readManifest(ret).getDefinedWorldview() != null); - - Set projects = this.configuration.getWorkspaces().get(workspaceName); - if (projects == null) { - projects = new LinkedHashSet<>(); - this.configuration.getWorkspaces().put(workspaceName, projects); - } + this.projects.put(ret.getUrn(), ret); + var project = ret; + var workspace = getWorkspace(workspaceName); - projects.add(ret.getProjectName()); + if (workspace.getProjects().stream().anyMatch(p -> p.getUrn().equals(project.getUrn()))) { + workspace.setProjects( + workspace.getProjects().stream() + .map(p -> p.getUrn().equals(project.getUrn()) ? project : p) + .collect(toList())); + } else { + workspace.getProjects().add(ret); + } + } - if (!this.workspaces.containsKey(workspaceName)) { - var ws = new WorkspaceImpl(); - ws.setUrn(workspaceName); - this.workspaces.put(workspaceName, ws); - } + return ret; + } - saveConfiguration(); + List getStrategies() { + return null; + } - /* - create project descriptor - */ - ProjectDescriptor descriptor = new ProjectDescriptor(); - descriptor.storage = ret; - descriptor.manifest = readManifest(ret); - descriptor.workspace = workspaceName; - descriptor.name = ret.getProjectName(); - descriptor.updateInterval = configuration.getSyncIntervalMinutes(); - projectDescriptors.put(ret.getProjectName(), descriptor); + /** + * Import a project from a URL into the given workspace and return the associated storage. Project + * configuration must not exist already, if so it is removed and rebuilt. All project implications + * in the workspace are resolved downstream. 
+ * + * @param projectUrl + * @param workspaceName + * @return + */ + public ProjectStorage importProject(String projectUrl, String workspaceName) { - // review all dependencies and rebuild caches - loadWorkspace(); + String projectName = Utils.URLs.getURLBaseName(projectUrl); + var configuration = this.configuration.getProjectConfiguration().get(projectName); + if (configuration != null) { + scope.warn( + "Configuration of imported project " + + projectName + + " exists already: " + + "import " + + "will " + + "rewrite it"); + } - } + ProjectStorage ret = null; + try { - return ret; - } + if (Utils.Git.isRemoteGitURL(projectUrl)) { - public Project loadProject(ProjectStorage storage, String workspaceName) { + File workspace = BaseService.getConfigurationSubdirectory(startupOptions, "workspaces"); + File projectHome = new File(workspace + File.separator + projectName); - var configuration = this.configuration.getProjectConfiguration().get(storage.getProjectName()); - if (configuration == null) { - throw new KlabResourceAccessException("project configuration for " + storage.getProjectName() + " is missing"); + if (projectHome.isDirectory()) { + scope.warn( + "Deleting and reimporting " + projectName + " from Git repository " + projectUrl); } - return createProjectData(storage.getProjectName(), workspaceName); - } - /** - * Either called automatically by the file watcher in {@link FileProjectStorage} or explicitly invoked in - * synchronous CRUD operations on projects when the project is locked by the requesting user. - * - * @param project - * @param changes - */ - public synchronized List handleFileChange(String project, - List> changes) { + try { + projectName = Utils.Git.clone(projectUrl, workspace, true, scope); + if (projectHome.exists()) { + ret = new FileProjectStorage(projectHome, projectName, this::handleFileChange); + } - if (loading.get()) { - return Collections.emptyList(); + } catch (Throwable t) { + // just make the return value null + if (projectHome.exists()) { + Utils.Files.deleteQuietly(projectHome); + } } + } else if (projectUrl.startsWith("http")) { + /* - populate the resource set changes in order of workspace affected + * TODO + * + * Load from another service. These projects may be served as mirrors or just + * kept to meet dependencies, according to the 'served' bit in the + * configuration. The source of truth should remain the source code, hosted in a + * single place (the remote service); mechanisms should be in place to store the + * original server and check for changes and new versions. 
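// The Git branch of importProject() clones the remote repository into the service's workspaces
// directory and removes the partially created folder when the clone fails. A clone-then-cleanup
// sketch; JGit is an assumed stand-in here, the actual code delegates to the project's Utils.Git
// helper.
import java.io.File;
import org.eclipse.jgit.api.Git;

class GitImportSketch {

  /** Clones into workspaceDir/projectName; returns null (after cleanup) if the clone fails. */
  static File cloneProject(String gitUrl, File workspaceDir, String projectName) {
    File projectHome = new File(workspaceDir, projectName);
    try (Git git = Git.cloneRepository().setURI(gitUrl).setDirectory(projectHome).call()) {
      return projectHome;
    } catch (Exception e) {
      deleteRecursively(projectHome); // do not leave a half-cloned project behind
      return null;
    }
  }

  private static void deleteRecursively(File file) {
    File[] children = file.listFiles();
    if (children != null) {
      for (File child : children) {
        deleteRecursively(child);
      }
    }
    file.delete();
  }
}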
*/ - Map result = new LinkedHashMap<>(); - - // this may or may not end up in the result set - var worldviewChange = new ResourceSet(); - worldviewChange.setWorkspace(Worldview.WORLDVIEW_WORKSPACE_IDENTIFIER); - worldviewChange.getServices().put(configuration.getServiceId(), service.getUrl()); - var projectDescriptor = projectDescriptors.get(project); - - Set affectedOntologies = new HashSet<>(); - Set affectedNamespaces = new HashSet<>(); - Set affectedBehaviors = new HashSet<>(); - Set affectedStrategies = new HashSet<>(); - List> newAssets = new ArrayList<>(); - - boolean mustRecomputeOrder = false; - - for (var change : changes) { - - if (change.getSecond() == CRUDOperation.DELETE) { - - // there's no new asset but all the affected must be reloaded - if (projectDescriptor.storage instanceof FileProjectStorage fps) { - - String deletedUrn = fps.getDocumentUrn(change.getFirst(), change.getThird()); - - if (deletedUrn != null) { - - affectedOntologies.add(deletedUrn); - for (var ontology : getOntologies(false)) { - if (!Sets.intersection(affectedOntologies, ontology.importedNamespaces(false)).isEmpty()) { - affectedOntologies.add(ontology.getUrn()); - } - } - - affectedNamespaces.addAll(affectedOntologies); - for (var namespace : getNamespaces()) { - if (!Sets.intersection(affectedNamespaces, namespace.importedNamespaces(false)).isEmpty()) { - affectedNamespaces.add(namespace.getUrn()); - } - } - affectedNamespaces.removeAll(affectedOntologies); - - // same for strategies and behaviors - affectedBehaviors.addAll(affectedOntologies); - for (var behavior : getBehaviors()) { - if (!Sets.intersection(affectedBehaviors, behavior.importedNamespaces(false)).isEmpty()) { - affectedBehaviors.add(behavior.getUrn()); - } - } - affectedBehaviors.removeAll(affectedOntologies); - - affectedStrategies.addAll(affectedOntologies); - for (var strategies : getStrategyDocuments()) { - if (!Sets.intersection(affectedStrategies, - strategies.importedNamespaces(false)).isEmpty()) { - affectedStrategies.add(strategies.getUrn()); - } - } - affectedStrategies.removeAll(affectedOntologies); - } - } - } else if (change.getSecond() == CRUDOperation.CREATE) { - // just a new asset, nothing should be affected, let this through - KlabDocument newAsset = switch (change.getFirst()) { - case ONTOLOGY -> loadOntology(change.getThird(), project); - case MODEL_NAMESPACE -> loadNamespace(change.getThird(), project); - case BEHAVIOR -> loadBehavior(change.getThird(), project); - case STRATEGY -> loadStrategy(change.getThird(), project); - default -> null; - }; + } else if (projectUrl.startsWith("file:") || new File(projectUrl).isFile()) { + + var file = Utils.URLs.getFileForURL(projectUrl); + if (file.isDirectory()) { + ret = new FileProjectStorage(file, projectName, this::handleFileChange); + } else if (Utils.Files.JAVA_ARCHIVE_EXTENSIONS.contains( + Utils.Files.getFileExtension(file))) { + // TODO ret = read from archive + } + } + } catch (Throwable t) { + scope.error(t); + } finally { + // service.setBusy(false); + } + + /** (Re)Create configuration */ + if (ret != null) { + + configuration = new ResourcesConfiguration.ProjectConfiguration(); + configuration.setSourceUrl(projectUrl); + configuration.setWorkspaceName(workspaceName); + configuration.setSyncIntervalMinutes(DEFAULT_GIT_SYNC_INTERVAL_MINUTES); + configuration.setStorageType(ret.getType()); + /* + * Default privileges are exclusive to the service, the API can be used to change them + */ + configuration.setPrivileges(ResourcePrivileges.empty()); + if (ret 
instanceof FileProjectStorage fps) { + configuration.setLocalPath(fps.getRootFolder()); + } + this.configuration.getProjectConfiguration().put(ret.getProjectName(), configuration); + configuration.setWorldview(readManifest(ret).getDefinedWorldview() != null); + + Set projects = this.configuration.getWorkspaces().get(workspaceName); + if (projects == null) { + projects = new LinkedHashSet<>(); + this.configuration.getWorkspaces().put(workspaceName, projects); + } + + projects.add(ret.getProjectName()); + + if (!this.workspaces.containsKey(workspaceName)) { + var ws = new WorkspaceImpl(); + ws.setUrn(workspaceName); + this.workspaces.put(workspaceName, ws); + } + + saveConfiguration(); + + /* + create project descriptor + */ + ProjectDescriptor descriptor = new ProjectDescriptor(); + descriptor.storage = ret; + descriptor.manifest = readManifest(ret); + descriptor.workspace = workspaceName; + descriptor.name = ret.getProjectName(); + descriptor.updateInterval = configuration.getSyncIntervalMinutes(); + projectDescriptors.put(ret.getProjectName(), descriptor); + + // review all dependencies and rebuild caches + loadWorkspace(); + } + + return ret; + } + + public Project loadProject(ProjectStorage storage, String workspaceName) { + + var configuration = this.configuration.getProjectConfiguration().get(storage.getProjectName()); + if (configuration == null) { + throw new KlabResourceAccessException( + "project configuration for " + storage.getProjectName() + " is missing"); + } + return createProjectData(storage.getProjectName(), workspaceName); + } + + /** + * Either called automatically by the file watcher in {@link FileProjectStorage} or explicitly + * invoked in synchronous CRUD operations on projects when the project is locked by the requesting + * user. + * + * @param project + * @param changes + */ + public synchronized List handleFileChange( + String project, List> changes) { + + if (loading.get()) { + return Collections.emptyList(); + } + + /* + populate the resource set changes in order of workspace affected + */ + Map result = new LinkedHashMap<>(); - // TODO Add document to project. 
FileStorage should have added it to the repository if - // there's one + // this may or may not end up in the result set + var worldviewChange = new ResourceSet(); + worldviewChange.setWorkspace(Worldview.WORLDVIEW_WORKSPACE_IDENTIFIER); + worldviewChange.getServices().put(configuration.getServiceId(), service.getUrl()); + var projectDescriptor = projectDescriptors.get(project); - newAssets.add(newAsset); + Set affectedOntologies = new HashSet<>(); + Set affectedNamespaces = new HashSet<>(); + Set affectedBehaviors = new HashSet<>(); + Set affectedStrategies = new HashSet<>(); + List> newAssets = new ArrayList<>(); - } else { + boolean mustRecomputeOrder = false; - /* - figure out which asset is affected and load it - */ - KlabDocument newAsset = switch (change.getFirst()) { - case ONTOLOGY -> loadOntology(change.getThird(), project); - case MODEL_NAMESPACE -> loadNamespace(change.getThird(), project); - case BEHAVIOR -> loadBehavior(change.getThird(), project); - case STRATEGY -> loadStrategy(change.getThird(), project); - default -> null; - }; - - if (newAsset != null) { - - newAssets.add(newAsset); - - KlabDocument oldAsset = switch (newAsset) { - case KimOntology ontology -> getOntology(ontology.getUrn()); - case KimNamespace namespace -> getNamespace(namespace.getUrn()); - case KActorsBehavior behavior -> getBehavior(behavior.getUrn()); - case KimObservationStrategyDocument strategy -> - getStrategyDocument(strategy.getUrn()); - default -> null; - }; + for (var change : changes) { - if (oldAsset == null) { - scope.error("Internal: cannot update a non-existing document: " + change.getSecond()); - return Collections.emptyList(); - } - - /* - if the implicit or explicit import statements have changed, the full order of loading - must be - recomputed. - */ - if (!mustRecomputeOrder) { - mustRecomputeOrder = - !newAsset.importedNamespaces(false).equals(oldAsset.importedNamespaces(false)); - } - - /* - establish what needs to be reloaded and which workspaces are affected: dry run across - ontologies (if the asset is an ontology), then strategies, namespaces and behaviors. First - establish the affected ones and compile the result sets per workspace. Then send those and - start the loading based on the collected metadata in the sets. 
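// handleFileChange() returns immediately while the loading flag is set, so file-watcher events
// caused by the manager's own writes and reloads are not processed a second time. The guard in
// isolation; note the real code sets the flag later, right before swapping the reloaded documents
// in, rather than around the whole method.
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

class ReentrancyGuardSketch {

  private final AtomicBoolean loading = new AtomicBoolean(false);

  List<String> onFileChanged(String path) {
    if (loading.get()) {
      return Collections.emptyList(); // we triggered this event ourselves
    }
    loading.set(true);
    try {
      // recompute affected documents, reload them, build the change sets...
      return List.of(path);
    } finally {
      loading.set(false);
    }
  }
}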
- */ - if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY) { - affectedOntologies.add(oldAsset.getUrn()); - for (var ontology : getOntologies(false)) { - if (!Sets.intersection(affectedOntologies, ontology.importedNamespaces(false)).isEmpty()) { - affectedOntologies.add(ontology.getUrn()); - } - } - } - - if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY || change.getFirst() == ProjectStorage.ResourceType.MODEL_NAMESPACE) { - affectedNamespaces.addAll(affectedOntologies); - affectedNamespaces.add(oldAsset.getUrn()); - for (var namespace : getNamespaces()) { - if (!Sets.intersection(affectedNamespaces, namespace.importedNamespaces(false)).isEmpty()) { - affectedNamespaces.add(namespace.getUrn()); - } - } - affectedNamespaces.removeAll(affectedOntologies); - } - - if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY || change.getFirst() == ProjectStorage.ResourceType.MODEL_NAMESPACE || change.getFirst() == ProjectStorage.ResourceType.BEHAVIOR) { - // same for strategies and behaviors - affectedBehaviors.addAll(affectedOntologies); - affectedBehaviors.add(oldAsset.getUrn()); - - for (var behavior : getBehaviors()) { - if (!Sets.intersection(affectedBehaviors, behavior.importedNamespaces(false)).isEmpty()) { - affectedBehaviors.add(behavior.getUrn()); - } - } - affectedBehaviors.removeAll(affectedOntologies); - } - - if (change.getFirst() == ProjectStorage.ResourceType.STRATEGY) { - affectedStrategies.addAll(affectedOntologies); - affectedStrategies.add(oldAsset.getUrn()); - for (var strategies : getStrategyDocuments()) { - if (!Sets.intersection(affectedStrategies, - strategies.importedNamespaces(false)).isEmpty()) { - affectedStrategies.add(strategies.getUrn()); - } - } - affectedStrategies.removeAll(affectedOntologies); - } - } else { - // TODO report failure - } - } - } + if (change.getSecond() == CRUDOperation.DELETE) { - this.loading.set(true); + // there's no new asset but all the affected must be reloaded + if (projectDescriptor.storage instanceof FileProjectStorage fps) { - /* - make the actual change. For each modification: if - it's the modified object, reset the corresponding concept descriptors in the language - validator (if an ontology) or the kbox for the namespace. Then reload and substitute in the - ontology, worldview and namespace arrays for the modified and the affected in the order - specified by the resourcesets. 
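// The affected-document computation above marks the changed URN and then sweeps the document
// lists, adding anything that imports something already marked; a single pass is enough because
// the lists are kept in dependency order. The same closure in plain java.util instead of Guava's
// Sets.intersection.
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class AffectedClosureSketch {

  /**
   * @param importsByUrn documents in dependency order, each mapped to the URNs it imports
   * @param changedUrn the document that was edited or deleted
   * @return every document that must be reloaded, including the changed one
   */
  static Set<String> affected(Map<String, Set<String>> importsByUrn, String changedUrn) {
    Set<String> affected = new HashSet<>();
    affected.add(changedUrn);
    for (Map.Entry<String, Set<String>> entry : importsByUrn.entrySet()) {
      if (!Collections.disjoint(affected, entry.getValue())) {
        affected.add(entry.getKey()); // imports something already affected
      }
    }
    return affected;
  }
}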
- */ + String deletedUrn = fps.getDocumentUrn(change.getFirst(), change.getThird()); - if (mustRecomputeOrder) { - computeLoadOrder(); - } + if (deletedUrn != null) { - for (var newAsset : newAssets) { - - /* - compile the ResourceSets based on the (possibly new) order - */ - if (!affectedOntologies.isEmpty()) { - for (var ontology : getOntologies(false)) { - if (affectedOntologies.contains(ontology.getUrn())) { - var descriptor = addToResultSet(ontology, Workspace.EXTERNAL_WORKSPACE_URN, result); - if (_worldviewOntologies.stream().anyMatch(ont -> newAsset.getUrn().equals(ont.getUrn()))) { - worldviewChange.getOntologies().add(descriptor); - } - } - } + affectedOntologies.add(deletedUrn); + for (var ontology : getOntologies(false)) { + if (!Sets.intersection(affectedOntologies, ontology.importedNamespaces(false)) + .isEmpty()) { + affectedOntologies.add(ontology.getUrn()); + } } - if (!affectedNamespaces.isEmpty()) { - for (var namespace : getNamespaces()) { - if (affectedNamespaces.contains(namespace.getUrn())) { - addToResultSet(namespace, Workspace.EXTERNAL_WORKSPACE_URN, result); - } - } + affectedNamespaces.addAll(affectedOntologies); + for (var namespace : getNamespaces()) { + if (!Sets.intersection(affectedNamespaces, namespace.importedNamespaces(false)) + .isEmpty()) { + affectedNamespaces.add(namespace.getUrn()); + } } - - if (!affectedBehaviors.isEmpty()) { - for (var behavior : getBehaviors()) { - if (affectedBehaviors.contains(behavior.getUrn())) { - addToResultSet(behavior, Workspace.EXTERNAL_WORKSPACE_URN, result); - } - } + affectedNamespaces.removeAll(affectedOntologies); + + // same for strategies and behaviors + affectedBehaviors.addAll(affectedOntologies); + for (var behavior : getBehaviors()) { + if (!Sets.intersection(affectedBehaviors, behavior.importedNamespaces(false)) + .isEmpty()) { + affectedBehaviors.add(behavior.getUrn()); + } } - - if (!affectedStrategies.isEmpty()) { - for (var strategies : getStrategyDocuments()) { - if (affectedStrategies.contains(strategies.getUrn())) { - var descriptor = addToResultSet(strategies, Workspace.EXTERNAL_WORKSPACE_URN, result); - if (_worldviewOntologies.stream().anyMatch(ont -> newAsset.getUrn().equals(ont.getUrn()))) { - worldviewChange.getObservationStrategies().add(descriptor); - } - } - } + affectedBehaviors.removeAll(affectedOntologies); + + affectedStrategies.addAll(affectedOntologies); + for (var strategies : getStrategyDocuments()) { + if (!Sets.intersection(affectedStrategies, strategies.importedNamespaces(false)) + .isEmpty()) { + affectedStrategies.add(strategies.getUrn()); + } } + affectedStrategies.removeAll(affectedOntologies); + } + } + } else if (change.getSecond() == CRUDOperation.CREATE) { - /* - TODO reload all the affected namespaces from their source, including the language validator - and kbox, using the - possibly new order. External namespaces that depend on anything that has changed should - probably cause a warning. - */ - List> newDocuments = new ArrayList<>(); - - for (KimOntology oldOntology : _ontologyOrder) { - if (affectedOntologies.contains(oldOntology.getUrn())) { - - boolean isWorldview = - _worldviewOntologies.stream().anyMatch(o -> newAsset.getUrn().equals(o.getUrn())); - - this.languageValidationScope.clearNamespace(oldOntology.getUrn()); - var newOntology = oldOntology.getUrn().equals(newAsset.getUrn()) ? 
newAsset : - loadOntology(documentURLs.get(oldOntology.getUrn()), - oldOntology.getProjectName()); - this.languageValidationScope.addNamespace((KimOntology) newOntology); - newDocuments.add(newOntology); - } - } - for (var oldNamespace : _namespaceOrder) { - if (affectedNamespaces.contains(oldNamespace.getUrn())) { - newDocuments.add(oldNamespace.getUrn().equals(newAsset.getUrn()) ? newAsset : - loadNamespace(documentURLs.get(oldNamespace.getUrn()), - oldNamespace.getProjectName())); - } - } - for (var oldBehavior : _behaviorOrder) { - if (affectedBehaviors.contains(oldBehavior.getUrn())) { - newDocuments.add(oldBehavior.getUrn().equals(newAsset.getUrn()) ? newAsset : - loadBehavior(documentURLs.get(oldBehavior.getUrn()), - oldBehavior.getProjectName())); - } - } - for (var oldStrategy : _observationStrategyDocuments) { - if (affectedStrategies.contains(oldStrategy.getUrn())) { - newDocuments.add(oldStrategy.getUrn().equals(newAsset.getUrn()) ? newAsset : - loadStrategy(documentURLs.get(oldStrategy.getUrn()), - oldStrategy.getProjectName())); - } - } + // just a new asset, nothing should be affected, let this through + KlabDocument newAsset = + switch (change.getFirst()) { + case ONTOLOGY -> loadOntology(change.getThird(), project); + case MODEL_NAMESPACE -> loadNamespace(change.getThird(), project); + case BEHAVIOR -> loadBehavior(change.getThird(), project); + case STRATEGY -> loadStrategy(change.getThird(), project); + default -> null; + }; - for (var document : newDocuments) { - switch (document) { - case KimOntology ontology -> { - if (_worldviewOntologies.stream().anyMatch(o -> newAsset.getUrn().equals(o.getUrn()))) { - _worldviewOntologies = - _worldviewOntologies.stream().map(o -> o.getUrn().equals(document.getUrn()) ? - ontology : o).collect(toList()); - _worldview.setOntologies(_worldviewOntologies); - } - _ontologyOrder = - _ontologyOrder.stream().map(o -> o.getUrn().equals(document.getUrn()) ? - ontology : - o).collect(toList()); - replaceAndIndex(ontology); - } - case KimNamespace namespace -> { - _namespaceOrder = - _namespaceOrder.stream().map(o -> o.getUrn().equals(document.getUrn()) ? - namespace : o).collect(toList()); - replaceAndIndex(namespace); - } - case KActorsBehavior behavior -> { - _behaviorOrder = - _behaviorOrder.stream().map(o -> o.getUrn().equals(document.getUrn()) ? - behavior : - o).collect(toList()); - replaceAndIndex(behavior); - } - case KimObservationStrategyDocument strategies -> { - _observationStrategyDocuments = - _observationStrategyDocuments.stream().map(o -> o.getUrn().equals(document.getUrn()) ? strategies : o).collect(toList()); - _observationStrategyDocumentMap.put(strategies.getUrn(), strategies); - _worldview.setObservationStrategies(_observationStrategyDocuments); - } - default -> throw new KlabIllegalStateException("can't deal with " + document); - } - } - } + // TODO Add document to project. FileStorage should have added it to the repository if + // there's one - this.loading.set(false); + newAssets.add(newAsset); - var ret = new ArrayList(); - if (!worldviewChange.getOntologies().isEmpty() || !worldviewChange.getObservationStrategies().isEmpty()) { - ret.add(worldviewChange); - } - ret.addAll(result.values()); + } else { /* - Report a ResourceSet per workspace affected. The listening end(s) will have to request the - contents. 
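// Once a document has been reloaded, it is swapped into the cached order lists by mapping over
// them and replacing the entry with the matching URN, which keeps positions (and therefore the
// dependency order) stable. The stream idiom as a small generic helper.
import java.util.List;
import java.util.function.Function;
import static java.util.stream.Collectors.toList;

class ReplaceByUrnSketch {

  /** Returns a copy of the list with the element whose URN matches the replacement swapped in. */
  static <T> List<T> replace(List<T> documents, T replacement, Function<T, String> urnOf) {
    String urn = urnOf.apply(replacement);
    return documents.stream()
        .map(d -> urn.equals(urnOf.apply(d)) ? replacement : d)
        .collect(toList());
  }
}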
- */ - for (var resourceSet : ret) { - scope.send(Message.MessageClass.ResourceLifecycle, Message.MessageType.WorkspaceChanged, - resourceSet); - } - - return ret; - } + figure out which asset is affected and load it + */ + KlabDocument newAsset = + switch (change.getFirst()) { + case ONTOLOGY -> loadOntology(change.getThird(), project); + case MODEL_NAMESPACE -> loadNamespace(change.getThird(), project); + case BEHAVIOR -> loadBehavior(change.getThird(), project); + case STRATEGY -> loadStrategy(change.getThird(), project); + default -> null; + }; - private void replaceAndIndex(KimNamespace namespace) { - _namespaceMap.put(namespace.getUrn(), namespace); - } + if (newAsset != null) { - private void replaceAndIndex(KActorsBehavior behavior) { - // TODO index app and component metadata for queries - _behaviorMap.put(behavior.getUrn(), behavior); - } + newAssets.add(newAsset); - private void replaceAndIndex(KimOntology ontology) { - // TODO index concept declarations for queries - _ontologyMap.put(ontology.getUrn(), ontology); - } + KlabDocument oldAsset = + switch (newAsset) { + case KimOntology ontology -> getOntology(ontology.getUrn()); + case KimNamespace namespace -> getNamespace(namespace.getUrn()); + case KActorsBehavior behavior -> getBehavior(behavior.getUrn()); + case KimObservationStrategyDocument strategy -> + getStrategyDocument(strategy.getUrn()); + default -> null; + }; - /** - * Add the document info to the result set that corresponds to the passed workspace in the passed result - * map, creating whatever is needed. If the external workspace name is given, use that for an external - * document, otherwise skip it. - * - * @param asset - * @param result - */ - private ResourceSet.Resource addToResultSet(KlabDocument asset, String externalWorkspaceId, - Map result) { - - String workspace = getWorkspaceForProject(asset.getProjectName()); - ResourceSet.Resource resource = null; - if (workspace == null) workspace = externalWorkspaceId; + if (oldAsset == null) { + scope.error("Internal: cannot update a non-existing document: " + change.getSecond()); + return Collections.emptyList(); + } - if (workspace != null) { + /* + if the implicit or explicit import statements have changed, the full order of loading + must be + recomputed. + */ + if (!mustRecomputeOrder) { + mustRecomputeOrder = + !newAsset.importedNamespaces(false).equals(oldAsset.importedNamespaces(false)); + } - ResourceSet resourceSet = result.get(workspace); - if (resourceSet == null) { - resourceSet = new ResourceSet(); - resourceSet.setWorkspace(workspace); - resourceSet.getServices().put(configuration.getServiceId(), service.getUrl()); - result.put(workspace, resourceSet); + /* + establish what needs to be reloaded and which workspaces are affected: dry run across + ontologies (if the asset is an ontology), then strategies, namespaces and behaviors. First + establish the affected ones and compile the result sets per workspace. Then send those and + start the loading based on the collected metadata in the sets. 
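The dry run described here is a transitive-dependents computation: starting from the changed document, any document whose imports intersect the affected set joins it. The service gets away with a single pass because documents are kept in load order; the self-contained sketch below (made-up URNs, plain java.util) uses an explicit fixed point instead.

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

// A document is affected if it is the changed one or (transitively) imports an affected
// document. The imports map below is made-up sample data.
class AffectedSetSketch {

  static Set<String> affectedBy(String changedUrn, Map<String, Set<String>> imports) {
    Set<String> affected = new LinkedHashSet<>();
    affected.add(changedUrn);
    boolean grown = true;
    while (grown) { // repeat until no new dependents appear
      grown = false;
      for (var entry : imports.entrySet()) {
        if (!affected.contains(entry.getKey())
            && !Collections.disjoint(entry.getValue(), affected)) {
          affected.add(entry.getKey());
          grown = true;
        }
      }
    }
    return affected;
  }

  public static void main(String[] args) {
    var imports = Map.of(
        "geography", Set.of("observation"),
        "hydrology", Set.of("geography"),
        "economy", Set.of("observation"));
    // editing "geography" drags in "hydrology" but leaves "economy" alone
    System.out.println(affectedBy("geography", imports));
  }
}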
+ */ + if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY) { + affectedOntologies.add(oldAsset.getUrn()); + for (var ontology : getOntologies(false)) { + if (!Sets.intersection(affectedOntologies, ontology.importedNamespaces(false)) + .isEmpty()) { + affectedOntologies.add(ontology.getUrn()); + } } + } - resource = new ResourceSet.Resource(); - resource.setResourceUrn(asset.getUrn()); - resource.setResourceVersion(asset.getVersion()); - resource.setServiceId(configuration.getServiceId()); - resource.setKnowledgeClass(KlabAsset.classify(asset)); - resource.getNotifications().addAll(asset.getNotifications()); - if (resourceSet.getServices().containsKey(configuration.getServiceId())) { - resourceSet.getServices().put(configuration.getServiceId(), service.getUrl()); + if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY + || change.getFirst() == ProjectStorage.ResourceType.MODEL_NAMESPACE) { + affectedNamespaces.addAll(affectedOntologies); + affectedNamespaces.add(oldAsset.getUrn()); + for (var namespace : getNamespaces()) { + if (!Sets.intersection(affectedNamespaces, namespace.importedNamespaces(false)) + .isEmpty()) { + affectedNamespaces.add(namespace.getUrn()); + } } + affectedNamespaces.removeAll(affectedOntologies); + } - /* - * Must check because a previous change may already have added this dependency - */ - if (!Utils.Resources.contains(resourceSet, resource)) { - switch (resource.getKnowledgeClass()) { - case RESOURCE -> { - // TODO - } - case NAMESPACE -> { - resourceSet.getNamespaces().add(resource); - } - case BEHAVIOR, SCRIPT, TESTCASE, APPLICATION -> { - resourceSet.getBehaviors().add(resource); - } - case ONTOLOGY -> { - resourceSet.getOntologies().add(resource); - } - case OBSERVATION_STRATEGY_DOCUMENT -> { - resourceSet.getObservationStrategies().add(resource); - } - } + if (change.getFirst() == ProjectStorage.ResourceType.ONTOLOGY + || change.getFirst() == ProjectStorage.ResourceType.MODEL_NAMESPACE + || change.getFirst() == ProjectStorage.ResourceType.BEHAVIOR) { + // same for strategies and behaviors + affectedBehaviors.addAll(affectedOntologies); + affectedBehaviors.add(oldAsset.getUrn()); + + for (var behavior : getBehaviors()) { + if (!Sets.intersection(affectedBehaviors, behavior.importedNamespaces(false)) + .isEmpty()) { + affectedBehaviors.add(behavior.getUrn()); + } } + affectedBehaviors.removeAll(affectedOntologies); + } - // resourceSet.getResources().add(resource); - - } - - return resource; - } - - /** - * Recompute from scratch the order of all known ontologies, namespaces, behaviors, strategies and - * projects - */ - private void computeLoadOrder() { - sortDocuments(_ontologyOrder, Klab.ErrorContext.ONTOLOGY); - sortDocuments(_namespaceOrder, Klab.ErrorContext.NAMESPACE); - sortDocuments(_behaviorOrder, Klab.ErrorContext.BEHAVIOR); - } - - private > void sortDocuments(List documents, - Klab.ErrorContext errorContext) { - - Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); - Map documentMap = new HashMap<>(); - for (var document : documents) { - documentMap.put(document.getUrn(), document); - dependencyGraph.addVertex(document.getUrn()); - for (var imported : document.importedNamespaces(true)) { - dependencyGraph.addVertex(imported); - dependencyGraph.addEdge(imported, document.getUrn()); + if (change.getFirst() == ProjectStorage.ResourceType.STRATEGY) { + affectedStrategies.addAll(affectedOntologies); + affectedStrategies.add(oldAsset.getUrn()); + for (var strategies : getStrategyDocuments()) { + if 
(!Sets.intersection(affectedStrategies, strategies.importedNamespaces(false)) + .isEmpty()) { + affectedStrategies.add(strategies.getUrn()); + } } + affectedStrategies.removeAll(affectedOntologies); + } + } else { + // TODO report failure } - - CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); - if (cycleDetector.detectCycles()) { - scope.error("Circular dependencies in workspace: cannot continue. Cyclic dependencies " + - "affect " + cycleDetector.findCycles(), Klab.ErrorCode.CIRCULAR_REFERENCES, errorContext); - return; - } - - // finish building the ontologies in the given order using a new language validator - documents.clear(); - TopologicalOrderIterator sort = new TopologicalOrderIterator<>(dependencyGraph); - while (sort.hasNext()) { - documents.add(documentMap.get(sort.next())); - } - } - - /** - * Return the nzme of the local workspace that hosts the passed project, or null. - * - * @param projectName - * @return - */ - public String getWorkspaceForProject(String projectName) { - var pd = projectDescriptors.get(projectName); - return pd == null ? null : pd.workspace; - } - - public KimOntology getOntology(String urn) { - return updateStatus(_ontologyMap.get(urn)); - } - - public KimNamespace getNamespace(String urn) { - return updateStatus(_namespaceMap.get(urn)); + } } - public KActorsBehavior getBehavior(String urn) { - return null; // TODO _ontologyMap.get(urn); - } + this.loading.set(true); - public KimObservationStrategyDocument getStrategyDocument(String urn) { - return _observationStrategyDocumentMap.get(urn); - } - - private KimOntology loadOntology(URL url, String project) { - try (var input = url.openStream()) { - List notifications = new ArrayList<>(); - var parsed = ontologyParser.parse(input, notifications); - var syntax = new OntologySyntaxImpl(parsed, languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - } + /* + make the actual change. For each modification: if + it's the modified object, reset the corresponding concept descriptors in the language + validator (if an ontology) or the kbox for the namespace. Then reload and substitute in the + ontology, worldview and namespace arrays for the modified and the affected in the order + specified by the resourcesets. 
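The reload-and-substitute step is, for each of those ordered lists, an order-preserving replace-by-key, as in the following minimal sketch (the Doc record is just a stand-in for KlabDocument):

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

// Order-preserving substitution: swap out the element whose key matches the replacement,
// keep everything else (and the ordering, which the load order depends on) untouched.
class ReplaceByKeySketch {

  static <T, K> List<T> replace(List<T> ordered, T replacement, Function<T, K> key) {
    K k = key.apply(replacement);
    return ordered.stream()
        .map(o -> key.apply(o).equals(k) ? replacement : o)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    record Doc(String urn, int version) {}
    var docs = List.of(new Doc("a", 1), new Doc("b", 1), new Doc("c", 1));
    System.out.println(replace(docs, new Doc("b", 2), Doc::urn)); // a, b@2, c in the same order
  }
}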
+ */ - @Override - protected void logError(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - } - }; - return LanguageAdapter.INSTANCE.adaptOntology(syntax, project, notifications); - } catch (IOException e) { - scope.error(e); - return null; - } + if (mustRecomputeOrder) { + computeLoadOrder(); } - private KimNamespace loadNamespace(URL url, String project) { - try (var input = url.openStream()) { - List notifications = new ArrayList<>(); - var parsed = namespaceParser.parse(input, notifications); - var syntax = new NamespaceSyntaxImpl(parsed, languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - } + for (var newAsset : newAssets) { - @Override - protected void logError(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - } - }; - return LanguageAdapter.INSTANCE.adaptNamespace(syntax, project, notifications); - } catch (IOException e) { - scope.error(e); + /* + compile the ResourceSets based on the (possibly new) order + */ + if (!affectedOntologies.isEmpty()) { + for (var ontology : getOntologies(false)) { + if (affectedOntologies.contains(ontology.getUrn())) { + var descriptor = addToResultSet(ontology, Workspace.EXTERNAL_WORKSPACE_URN, result); + if (_worldviewOntologies.stream() + .anyMatch(ont -> newAsset.getUrn().equals(ont.getUrn()))) { + worldviewChange.getOntologies().add(descriptor); + } + } } - return null; - } - - private KActorsBehavior loadBehavior(URL url, String project) { - // try (var input = url.openStream()) { - // List notifications = new ArrayList<>(); - // var parsed = behaviorParser.parse(input, notifications); - // var syntax = new KActorsBehaviorImpl(parsed, languageValidationScope) { - // - // @Override - // protected void logWarning(ParsedObject target, EObject object, - // EStructuralFeature - // feature, - // String message) { - // notifications.add(makeNotification(target, object, feature, message, - // org.integratedmodelling.klab.api.services.runtime.Notification.Level - // .Warning)); - // } - // - // @Override - // protected void logError(ParsedObject target, EObject object, EStructuralFeature - // feature, - // String message) { - // notifications.add(makeNotification(target, object, feature, message, - // org.integratedmodelling.klab.api.services.runtime.Notification.Level - // .Error)); - // } - // }; - // return LanguageAdapter.INSTANCE.adaptBehavior(syntax, project, notifications); - // } catch (IOException e) { - // scope.error(e); - return null; - // } - } - - private KimObservationStrategyDocument loadStrategy(URL url, String project) { - try (var input = url.openStream()) { - List notifications = new ArrayList<>(); - var parsed = strategyParser.parse(input, notifications); - var syntax = new ObservationStrategiesSyntaxImpl(parsed, languageValidationScope) { - - @Override - protected void logWarning(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, 
object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Warning)); - } + } - @Override - protected void logError(ParsedObject target, EObject object, EStructuralFeature feature, - String message) { - notifications.add(makeNotification(target, object, feature, message, - org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); - } - }; - return LanguageAdapter.INSTANCE.adaptStrategies(syntax, project, notifications); - } catch (IOException e) { - scope.error(e); - return null; + if (!affectedNamespaces.isEmpty()) { + for (var namespace : getNamespaces()) { + if (affectedNamespaces.contains(namespace.getUrn())) { + addToResultSet(namespace, Workspace.EXTERNAL_WORKSPACE_URN, result); + } } - } + } - /** - * TODO pass document name, type and project name to complete the LC (not needed at the moment). - * - * @param target - * @param object - * @param feature - * @param message - * @param level - * @return - */ - private Notification makeNotification(ParsedObject target, EObject object, EStructuralFeature feature, - String message, Notification.Level level) { - if (target != null) { - var context = new NotificationImpl.LexicalContextImpl(); - context.setLength(target.getCodeLength()); - context.setOffsetInDocument(target.getCodeOffset()); - // context.setUrl(target.uri()); - return Notification.create(message, level, context); + if (!affectedBehaviors.isEmpty()) { + for (var behavior : getBehaviors()) { + if (affectedBehaviors.contains(behavior.getUrn())) { + addToResultSet(behavior, Workspace.EXTERNAL_WORKSPACE_URN, result); + } } - return Notification.create(message, level); - } - - public List> getProjectLoadOrder() { - return this._projectLoadOrder; - } + } - /** - * Read, validate, resolve and sorts projects locally (all workspaces) and from the network, returning the - * load order for all projects, including local and externally resolved ones. Check errors (reported in - * the configured monitor) and unresolved projects after calling. Does not throw exceptions. - *

- * While loading the workspaces, (re)build the workspace list so that {@link #getWorkspaces()} can work. - * The workspaces are also listed in order of first-contact dependency although circular deps between - * workspaces are permitted. - * - * @return the load order or an empty collection in case of circular dependencies or no configuration. If - * errors happened they will be notified through the monitor and {@link #getUnresolvedProjects()} will - * return the list of projects that have not resolved properly (including resource not found and version - * mismatch errors). Only one of the elements in each returned pair will be non-null. - */ - public synchronized boolean loadWorkspace() { - - // clear all caches - this._projectLoadOrder = null; - this._ontologyOrder = null; - this._ontologyMap = null; - this._namespaceMap = null; - this._namespaceOrder = null; - this._observationStrategyDocuments = null; - this._observationStrategies = null; - this._behaviorMap = null; - this._behaviorOrder = null; - this._worldview = null; - this.worldviewProvider = false; - - for (var workspace : configuration.getWorkspaces().keySet()) { - for (var projectName : configuration.getWorkspaces().get(workspace)) { - var descriptor = projectDescriptors.get(projectName); - if (!this.worldviewProvider && descriptor.manifest.getDefinedWorldview() != null) { - this.worldviewProvider = true; - this.adoptedWorldview = descriptor.manifest.getDefinedWorldview(); - } + if (!affectedStrategies.isEmpty()) { + for (var strategies : getStrategyDocuments()) { + if (affectedStrategies.contains(strategies.getUrn())) { + var descriptor = addToResultSet(strategies, Workspace.EXTERNAL_WORKSPACE_URN, result); + if (_worldviewOntologies.stream() + .anyMatch(ont -> newAsset.getUrn().equals(ont.getUrn()))) { + worldviewChange.getObservationStrategies().add(descriptor); } - } - - // - // for (var workspace : configuration.getWorkspaces().keySet()) { - // for (var projectName : configuration.getWorkspaces().get(workspace)) { - // var projectConfiguration = projectDescriptors.get(projectName); - // // TODO put this outside the workspace loop after checking for worldviews and - // sorting - // var project = loadProject(projectConfiguration.storage, workspace); - // projects.put(projectConfiguration.name, project); - // } - // } - /* - TODO wait until this.loading.get() is false! Could be straight in here or we could just use this - from an operation queue. API admin ops and retrievals should also ensure that they only return - when not loading. 
- - Use this pattern - - if(lock.compareAndSet(false, true)){ - try { - //do - } catch(Exception e){ - //error handling - } finally { - lock.set(false); } } - */ - - this.loading.set(true); - - this._projectLoadOrder = new ArrayList<>(); - this.workspaces.clear(); - this.projects.clear(); - - Graph, DefaultEdge> dependencyGraph = - new DefaultDirectedGraph<>(DefaultEdge.class); - - // first insert worldview -> project dependencies - Map, List>> wdeps = new HashMap<>(); - for (var pd : projectDescriptors.values()) { - if (pd.manifest.getDefinedWorldview() != null) { - wdeps.computeIfAbsent(Pair.of(pd.manifest.getDefinedWorldview(), pd.manifest.getVersion()), - s -> new ArrayList<>()); - } else if (pd.manifest.getWorldview() != null) { - wdeps.computeIfAbsent(Pair.of(pd.manifest.getWorldview(), - getWorldviewVersion(pd.manifest.getWorldview())), s -> new ArrayList<>()).add(Pair.of(pd.name, pd.manifest.getVersion())); + } + + /* + TODO reload all the affected namespaces from their source, including the language validator + and kbox, using the + possibly new order. External namespaces that depend on anything that has changed should + probably cause a warning. + */ + List> newDocuments = new ArrayList<>(); + + for (KimOntology oldOntology : _ontologyOrder) { + if (affectedOntologies.contains(oldOntology.getUrn())) { + + boolean isWorldview = + _worldviewOntologies.stream().anyMatch(o -> newAsset.getUrn().equals(o.getUrn())); + + this.languageValidationScope.clearNamespace(oldOntology.getUrn()); + var newOntology = + oldOntology.getUrn().equals(newAsset.getUrn()) + ? newAsset + : loadOntology( + documentURLs.get(oldOntology.getUrn()), oldOntology.getProjectName()); + this.languageValidationScope.addNamespace((KimOntology) newOntology); + newDocuments.add(newOntology); + } + } + for (var oldNamespace : _namespaceOrder) { + if (affectedNamespaces.contains(oldNamespace.getUrn())) { + newDocuments.add( + oldNamespace.getUrn().equals(newAsset.getUrn()) + ? newAsset + : loadNamespace( + documentURLs.get(oldNamespace.getUrn()), oldNamespace.getProjectName())); + } + } + for (var oldBehavior : _behaviorOrder) { + if (affectedBehaviors.contains(oldBehavior.getUrn())) { + newDocuments.add( + oldBehavior.getUrn().equals(newAsset.getUrn()) + ? newAsset + : loadBehavior( + documentURLs.get(oldBehavior.getUrn()), oldBehavior.getProjectName())); + } + } + for (var oldStrategy : _observationStrategyDocuments) { + if (affectedStrategies.contains(oldStrategy.getUrn())) { + newDocuments.add( + oldStrategy.getUrn().equals(newAsset.getUrn()) + ? newAsset + : loadStrategy( + documentURLs.get(oldStrategy.getUrn()), oldStrategy.getProjectName())); + } + } + + for (var document : newDocuments) { + switch (document) { + case KimOntology ontology -> { + if (_worldviewOntologies.stream().anyMatch(o -> newAsset.getUrn().equals(o.getUrn()))) { + _worldviewOntologies = + _worldviewOntologies.stream() + .map(o -> o.getUrn().equals(document.getUrn()) ? ontology : o) + .collect(toList()); + _worldview.setOntologies(_worldviewOntologies); } + _ontologyOrder = + _ontologyOrder.stream() + .map(o -> o.getUrn().equals(document.getUrn()) ? ontology : o) + .collect(toList()); + replaceAndIndex(ontology); + } + case KimNamespace namespace -> { + _namespaceOrder = + _namespaceOrder.stream() + .map(o -> o.getUrn().equals(document.getUrn()) ? 
namespace : o) + .collect(toList()); + replaceAndIndex(namespace); + } + case KActorsBehavior behavior -> { + _behaviorOrder = + _behaviorOrder.stream() + .map(o -> o.getUrn().equals(document.getUrn()) ? behavior : o) + .collect(toList()); + replaceAndIndex(behavior); + } + case KimObservationStrategyDocument strategies -> { + _observationStrategyDocuments = + _observationStrategyDocuments.stream() + .map(o -> o.getUrn().equals(document.getUrn()) ? strategies : o) + .collect(toList()); + _observationStrategyDocumentMap.put(strategies.getUrn(), strategies); + _worldview.setObservationStrategies(_observationStrategyDocuments); + } + default -> throw new KlabIllegalStateException("can't deal with " + document); + } + } + } + + this.loading.set(false); + + var ret = new ArrayList(); + if (!worldviewChange.getOntologies().isEmpty() + || !worldviewChange.getObservationStrategies().isEmpty()) { + ret.add(worldviewChange); + } + ret.addAll(result.values()); + + /* + Report a ResourceSet per workspace affected. The listening end(s) will have to request the + contents. + */ + for (var resourceSet : ret) { + scope.send( + Message.MessageClass.ResourceLifecycle, + Message.MessageType.WorkspaceChanged, + resourceSet); + } + + return ret; + } + + private void replaceAndIndex(KimNamespace namespace) { + _namespaceMap.put(namespace.getUrn(), namespace); + } + + private void replaceAndIndex(KActorsBehavior behavior) { + // TODO index app and component metadata for queries + _behaviorMap.put(behavior.getUrn(), behavior); + } + + private void replaceAndIndex(KimOntology ontology) { + // TODO index concept declarations for queries + _ontologyMap.put(ontology.getUrn(), ontology); + } + + /** + * Add the document info to the result set that corresponds to the passed workspace in the passed + * result map, creating whatever is needed. If the external workspace name is given, use that for + * an external document, otherwise skip it. 
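A distilled view of the accumulation this method performs follows, with a simplified Bucket record standing in for ResourceSet: one entry per workspace, created on demand via computeIfAbsent, each collecting what changed there.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// One bucket per workspace, created lazily; each bucket collects the URNs of the documents
// that changed in it. Bucket is a simplified stand-in for ResourceSet.
class PerWorkspaceBucketsSketch {

  record Bucket(String workspace, List<String> documentUrns) {}

  static Bucket bucketFor(Map<String, Bucket> result, String workspace) {
    return result.computeIfAbsent(workspace, w -> new Bucket(w, new ArrayList<>()));
  }

  public static void main(String[] args) {
    Map<String, Bucket> result = new LinkedHashMap<>();
    bucketFor(result, "local").documentUrns().add("my.project.namespace");
    bucketFor(result, "local").documentUrns().add("my.project.ontology");
    bucketFor(result, "external").documentUrns().add("worldview.core");
    result.values().forEach(System.out::println); // two buckets, grouped by workspace
  }
}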
+ * + * @param asset + * @param result + */ + private ResourceSet.Resource addToResultSet( + KlabDocument asset, String externalWorkspaceId, Map result) { + + String workspace = getWorkspaceForProject(asset.getProjectName()); + ResourceSet.Resource resource = null; + if (workspace == null) workspace = externalWorkspaceId; + + if (workspace != null) { + + ResourceSet resourceSet = result.get(workspace); + if (resourceSet == null) { + resourceSet = new ResourceSet(); + resourceSet.setWorkspace(workspace); + resourceSet.getServices().put(configuration.getServiceId(), service.getUrl()); + result.put(workspace, resourceSet); + } + + resource = new ResourceSet.Resource(); + resource.setResourceUrn(asset.getUrn()); + resource.setResourceVersion(asset.getVersion()); + resource.setServiceId(configuration.getServiceId()); + resource.setKnowledgeClass(KlabAsset.classify(asset)); + resource.getNotifications().addAll(asset.getNotifications()); + if (resourceSet.getServices().containsKey(configuration.getServiceId())) { + resourceSet.getServices().put(configuration.getServiceId(), service.getUrl()); + } + + /* + * Must check because a previous change may already have added this dependency + */ + if (!Utils.Resources.contains(resourceSet, resource)) { + switch (resource.getKnowledgeClass()) { + case RESOURCE -> { + // TODO + } + case NAMESPACE -> { + resourceSet.getNamespaces().add(resource); + } + case BEHAVIOR, SCRIPT, TESTCASE, APPLICATION -> { + resourceSet.getBehaviors().add(resource); + } + case ONTOLOGY -> { + resourceSet.getOntologies().add(resource); + } + case OBSERVATION_STRATEGY_DOCUMENT -> { + resourceSet.getObservationStrategies().add(resource); + } } - - for (var wv : wdeps.keySet()) { - dependencyGraph.addVertex(wv); - for (var dep : wdeps.get(wv)) { - dependencyGraph.addVertex(dep); - dependencyGraph.addEdge(dep, wv); + } + + // resourceSet.getResources().add(resource); + + } + + return resource; + } + + /** + * Recompute from scratch the order of all known ontologies, namespaces, behaviors, strategies and + * projects + */ + private void computeLoadOrder() { + sortDocuments(_ontologyOrder, Klab.ErrorContext.ONTOLOGY); + sortDocuments(_namespaceOrder, Klab.ErrorContext.NAMESPACE); + sortDocuments(_behaviorOrder, Klab.ErrorContext.BEHAVIOR); + } + + private > void sortDocuments( + List documents, Klab.ErrorContext errorContext) { + + Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); + Map documentMap = new HashMap<>(); + for (var document : documents) { + documentMap.put(document.getUrn(), document); + dependencyGraph.addVertex(document.getUrn()); + for (var imported : document.importedNamespaces(true)) { + dependencyGraph.addVertex(imported); + dependencyGraph.addEdge(imported, document.getUrn()); + } + } + + CycleDetector cycleDetector = new CycleDetector<>(dependencyGraph); + if (cycleDetector.detectCycles()) { + scope.error( + "Circular dependencies in workspace: cannot continue. Cyclic dependencies " + + "affect " + + cycleDetector.findCycles(), + Klab.ErrorCode.CIRCULAR_REFERENCES, + errorContext); + return; + } + + // finish building the ontologies in the given order using a new language validator + documents.clear(); + TopologicalOrderIterator sort = + new TopologicalOrderIterator<>(dependencyGraph); + while (sort.hasNext()) { + documents.add(documentMap.get(sort.next())); + } + } + + /** + * Return the nzme of the local workspace that hosts the passed project, or null. 
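The computeLoadOrder/sortDocuments pair above follows the usual JGraphT recipe: an edge from every import to its importer, a cycle check, then a topological traversal. A self-contained sketch with plain String URNs and an invented dependency map:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.jgrapht.Graph;
import org.jgrapht.alg.cycle.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

// Same recipe as sortDocuments, reduced to plain strings: refuse to proceed on cycles,
// otherwise read vertices back in topological order.
class LoadOrderSketch {

  static List<String> loadOrder(Map<String, List<String>> imports) {
    Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);
    imports.forEach((urn, deps) -> {
      graph.addVertex(urn);
      for (String dep : deps) {
        graph.addVertex(dep);
        graph.addEdge(dep, urn); // a dependency must load before its importer
      }
    });
    if (new CycleDetector<>(graph).detectCycles()) {
      throw new IllegalStateException("circular imports: cannot compute a load order");
    }
    List<String> order = new ArrayList<>();
    new TopologicalOrderIterator<>(graph).forEachRemaining(order::add);
    return order;
  }

  public static void main(String[] args) {
    System.out.println(loadOrder(Map.of(
        "hydrology", List.of("geography"),
        "geography", List.of("observation"),
        "observation", List.of())));
    // e.g. [observation, geography, hydrology]
  }
}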
+ * + * @param projectName + * @return + */ + public String getWorkspaceForProject(String projectName) { + var pd = projectDescriptors.get(projectName); + return pd == null ? null : pd.workspace; + } + + public KimOntology getOntology(String urn) { + return updateStatus(_ontologyMap.get(urn)); + } + + public KimNamespace getNamespace(String urn) { + return updateStatus(_namespaceMap.get(urn)); + } + + public KActorsBehavior getBehavior(String urn) { + return null; // TODO _ontologyMap.get(urn); + } + + public KimObservationStrategyDocument getStrategyDocument(String urn) { + return _observationStrategyDocumentMap.get(urn); + } + + private KimOntology loadOntology(URL url, String project) { + try (var input = url.openStream()) { + List notifications = new ArrayList<>(); + var parsed = ontologyParser.parse(input, notifications); + var syntax = + new OntologySyntaxImpl(parsed, languageValidationScope) { + + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); } - } - // build a version-aware dependency tree - for (String s : projectDescriptors.keySet()) { - var snode = Pair.of(s, projectDescriptors.get(s).manifest.getVersion()); - dependencyGraph.addVertex(snode); - for (var dep : projectDescriptors.get(s).manifest.getPrerequisiteProjects()) { - var pnode = Pair.of(dep.getFirst(), dep.getSecond()); - dependencyGraph.addVertex(pnode); - dependencyGraph.addEdge(pnode, snode); + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); } - } - - CycleDetector, DefaultEdge> cycleDetector = - new CycleDetector<>(dependencyGraph); - if (cycleDetector.detectCycles()) { - scope.error(Klab.ErrorCode.CIRCULAR_REFERENCES, Klab.ErrorContext.PROJECT, "Projects in " + - "configuration have cyclic dependencies on each other: " + "will not " + "proceed. 
" + - "Review " + - "configuration"); - this.loading.set(false); - return false; - } else { - - TopologicalOrderIterator, DefaultEdge> sort = - new TopologicalOrderIterator(dependencyGraph); - while (sort.hasNext()) { - var proj = sort.next(); - // verify availability - if (projectDescriptors.get(proj.getFirst()) != null) { - // local dependency: check version - var pd = projectDescriptors.get(proj.getFirst()); - if (pd.manifest.getVersion().compatible(proj.getSecond())) { - this._projectLoadOrder.add(Pair.of(pd.storage, null)); - } else { - scope.error(Klab.ErrorContext.PROJECT, Klab.ErrorCode.MISMATCHED_VERSION, "Project " - + proj.getFirst() + "@" + proj.getSecond() + " is required" + " by " + - "other " + - "projects in workspace but incompatible version " + pd.manifest.getVersion() + " " + - "is available in local workspace"); - unresolvedProjects.add(proj); - } - } else { - var externalProject = externalProjectResolver.apply(proj.getFirst()); - if (externalProject != null) { - // check version - if (externalProject.getManifest().getVersion().compatible(proj.getSecond())) { - ProjectDescriptor descriptor = new ProjectDescriptor(); - descriptor.externalProject = externalProject; - descriptor.manifest = externalProject.getManifest(); - descriptor.workspace = null; - descriptor.name = proj.getFirst(); - projectDescriptors.put(proj.getFirst(), descriptor); - this._projectLoadOrder.add(Pair.of(null, externalProject)); - } else { - scope.error(Klab.ErrorContext.PROJECT, Klab.ErrorCode.MISMATCHED_VERSION, - "Project " + proj.getFirst() + "@" + proj.getSecond() + " is " + - "required by " + - "other projects in workspace but incompatible " + "version " + externalProject.getManifest().getVersion() + " is " + "available " + "externally"); - unresolvedProjects.add(proj); - } - } else { - scope.error(Klab.ErrorContext.PROJECT, Klab.ErrorCode.UNRESOLVED_REFERENCE, - "Project " + proj.getFirst() + "@" + proj.getSecond() + " is required" + " " - + "by " + - "other projects in workspace but cannot be resolved from " + "the " + "network"); - unresolvedProjects.add(proj); - } - } + }; + return LanguageAdapter.INSTANCE.adaptOntology(syntax, project, notifications); + } catch (IOException e) { + scope.error(e); + return null; + } + } + + private KimNamespace loadNamespace(URL url, String project) { + try (var input = url.openStream()) { + List notifications = new ArrayList<>(); + var parsed = namespaceParser.parse(input, notifications); + var syntax = + new NamespaceSyntaxImpl(parsed, languageValidationScope) { + + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); } - } - - /* - we have workspaces and project descriptors; load ontologies and namespaces - */ - for (var ontology : getOntologies(false)) { - replaceAndIndex(ontology); - } - for (var namespace : getNamespaces()) { - replaceAndIndex(namespace); - } - // TODO behaviors - - // build workspace and project descriptors and attribute all namespaces - for (var proj : this._projectLoadOrder) { - if (proj.getFirst() != null) { - var pdesc = projectDescriptors.get(proj.getFirst().getProjectName()); - if (pdesc != null && pdesc.storage != null) { - WorkspaceImpl ws = null; - if (pdesc.workspace != null) { - ws = this.workspaces.get(pdesc.workspace); - if (ws == null) { - ws = new WorkspaceImpl(); - 
ws.setUrn(pdesc.workspace); - this.workspaces.put(pdesc.workspace, ws); - } - } - var project = createProjectData(pdesc.name, pdesc.workspace); - this.projects.put(pdesc.name, project); - if (ws != null) { - if (ws.getProjects().stream().anyMatch(p -> p.getUrn().equals(project.getUrn()))) { - var newProjects = - ws.getProjects().stream().map(p -> p.getUrn().equals(project.getUrn()) ? - project : p).toList(); - ws.getProjects().clear(); - ws.getProjects().addAll(newProjects); - } else { - ws.getProjects().add(project); - } - } - } + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); + } + }; + return LanguageAdapter.INSTANCE.adaptNamespace(syntax, project, notifications); + } catch (IOException e) { + scope.error(e); + } + return null; + } + + private KActorsBehavior loadBehavior(URL url, String project) { + // try (var input = url.openStream()) { + // List notifications = new ArrayList<>(); + // var parsed = behaviorParser.parse(input, notifications); + // var syntax = new KActorsBehaviorImpl(parsed, languageValidationScope) { + // + // @Override + // protected void logWarning(ParsedObject target, EObject object, + // EStructuralFeature + // feature, + // String message) { + // notifications.add(makeNotification(target, object, feature, message, + // + // org.integratedmodelling.klab.api.services.runtime.Notification.Level + // .Warning)); + // } + // + // @Override + // protected void logError(ParsedObject target, EObject object, + // EStructuralFeature + // feature, + // String message) { + // notifications.add(makeNotification(target, object, feature, message, + // + // org.integratedmodelling.klab.api.services.runtime.Notification.Level + // .Error)); + // } + // }; + // return LanguageAdapter.INSTANCE.adaptBehavior(syntax, project, notifications); + // } catch (IOException e) { + // scope.error(e); + return null; + // } + } + + private KimObservationStrategyDocument loadStrategy(URL url, String project) { + try (var input = url.openStream()) { + List notifications = new ArrayList<>(); + var parsed = strategyParser.parse(input, notifications); + var syntax = + new ObservationStrategiesSyntaxImpl(parsed, languageValidationScope) { + + @Override + protected void logWarning( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level + .Warning)); } - } - this.loading.set(false); + @Override + protected void logError( + ParsedObject target, EObject object, EStructuralFeature feature, String message) { + notifications.add( + makeNotification( + target, + object, + feature, + message, + org.integratedmodelling.klab.api.services.runtime.Notification.Level.Error)); + } + }; + return LanguageAdapter.INSTANCE.adaptStrategies(syntax, project, notifications); + } catch (IOException e) { + scope.error(e); + return null; + } + } + + /** + * TODO pass document name, type and project name to complete the LC (not needed at the moment). 
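The logWarning/logError callbacks above and the makeNotification helper documented here boil down to attaching a lexical span (offset and length) to a message so a client can underline the offending code; a toy model of that shape, using hypothetical stand-in types, is sketched below.

import java.util.ArrayList;
import java.util.List;

// Toy model of the diagnostic flow: each callback records the message together with the
// span of source code it refers to. All types here are stand-ins, not the k.LAB API.
class DiagnosticsSketch {

  enum Level { WARNING, ERROR }

  record Diagnostic(String message, Level level, int offset, int length) {}

  static class Collector {
    final List<Diagnostic> collected = new ArrayList<>();

    void warning(String message, int offset, int length) {
      collected.add(new Diagnostic(message, Level.WARNING, offset, length));
    }

    void error(String message, int offset, int length) {
      collected.add(new Diagnostic(message, Level.ERROR, offset, length));
    }
  }

  public static void main(String[] args) {
    var collector = new Collector();
    collector.error("unknown concept 'elevaton'", 120, 8); // offset + length locate the token
    collector.collected.forEach(System.out::println);
  }
}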

+   *
+   * @param target
+   * @param object
+   * @param feature
+   * @param message
+   * @param level
+   * @return
+   */
+  private Notification makeNotification(
+      ParsedObject target,
+      EObject object,
+      EStructuralFeature feature,
+      String message,
+      Notification.Level level) {
+    if (target != null) {
+      var context = new NotificationImpl.LexicalContextImpl();
+      context.setLength(target.getCodeLength());
+      context.setOffsetInDocument(target.getCodeOffset());
+      // context.setUrl(target.uri());
+      return Notification.create(message, level, context);
+    }
+    return Notification.create(message, level);
+  }
+
+  public List<Pair<ProjectStorage, Project>> getProjectLoadOrder() {
+    return this._projectLoadOrder;
+  }
+
+  /**
+   * Read, validate, resolve and sort projects locally (all workspaces) and from the network,
+   * returning the load order for all projects, including local and externally resolved ones. Check
+   * errors (reported in the configured monitor) and unresolved projects after calling. Does not
+   * throw exceptions.
+   *
+   * 

While loading the workspaces, (re)build the workspace list so that {@link #getWorkspaces()} + * can work. The workspaces are also listed in order of first-contact dependency although circular + * deps between workspaces are permitted. + * + * @return the load order or an empty collection in case of circular dependencies or no + * configuration. If errors happened they will be notified through the monitor and {@link + * #getUnresolvedProjects()} will return the list of projects that have not resolved properly + * (including resource not found and version mismatch errors). Only one of the elements in + * each returned pair will be non-null. + */ + public synchronized boolean loadWorkspace() { + + // clear all caches + this._projectLoadOrder = null; + this._ontologyOrder = null; + this._ontologyMap = null; + this._namespaceMap = null; + this._namespaceOrder = null; + this._observationStrategyDocuments = null; + this._observationStrategies = null; + this._behaviorMap = null; + this._behaviorOrder = null; + this._worldview = null; + this.worldviewProvider = false; + + for (var workspace : configuration.getWorkspaces().keySet()) { + for (var projectName : configuration.getWorkspaces().get(workspace)) { + var descriptor = projectDescriptors.get(projectName); + if (!this.worldviewProvider && descriptor.manifest.getDefinedWorldview() != null) { + this.worldviewProvider = true; + this.adoptedWorldview = descriptor.manifest.getDefinedWorldview(); + } + } + } - return true; + // + // for (var workspace : configuration.getWorkspaces().keySet()) { + // for (var projectName : configuration.getWorkspaces().get(workspace)) { + // var projectConfiguration = projectDescriptors.get(projectName); + // // TODO put this outside the workspace loop after checking for worldviews and + // sorting + // var project = loadProject(projectConfiguration.storage, workspace); + // projects.put(projectConfiguration.name, project); + // } + // } + /* + TODO wait until this.loading.get() is false! Could be straight in here or we could just use this + from an operation queue. API admin ops and retrievals should also ensure that they only return + when not loading. 
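The TODO asks for a compare-and-set guard (the pattern is sketched in the comment just below); a compilable version, assuming the loading field is an AtomicBoolean as the surrounding set(true)/set(false) calls suggest, might look like this:

import java.util.concurrent.atomic.AtomicBoolean;

// Only one caller wins the flag and performs the reload; everyone else backs off
// (or could retry/queue). The flag is always released, even if the reload throws.
class LoadingGuardSketch {

  private final AtomicBoolean loading = new AtomicBoolean(false);

  boolean reloadIfIdle(Runnable reload) {
    if (!loading.compareAndSet(false, true)) {
      return false; // someone else is loading; caller can retry or enqueue the request
    }
    try {
      reload.run();
      return true;
    } finally {
      loading.set(false);
    }
  }

  public static void main(String[] args) {
    var guard = new LoadingGuardSketch();
    System.out.println(guard.reloadIfIdle(() -> System.out.println("loading workspace...")));
  }
}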
+ + Use this pattern + + if(lock.compareAndSet(false, true)){ + try { + //do + } catch(Exception e){ + //error handling + } finally { + lock.set(false); + } } + */ - private Version getWorldviewVersion(String worldview) { - for (var pd : projectDescriptors.values()) { - if (worldview.equals(pd.manifest.getDefinedWorldview())) { - return pd.manifest.getVersion(); + this.loading.set(true); + + this._projectLoadOrder = new ArrayList<>(); + this.workspaces.clear(); + this.projects.clear(); + + Graph, DefaultEdge> dependencyGraph = + new DefaultDirectedGraph<>(DefaultEdge.class); + + // first insert worldview -> project dependencies + Map, List>> wdeps = new HashMap<>(); + for (var pd : projectDescriptors.values()) { + if (pd.manifest.getDefinedWorldview() != null) { + wdeps.computeIfAbsent( + Pair.of(pd.manifest.getDefinedWorldview(), pd.manifest.getVersion()), + s -> new ArrayList<>()); + } else if (pd.manifest.getWorldview() != null) { + wdeps + .computeIfAbsent( + Pair.of( + pd.manifest.getWorldview(), getWorldviewVersion(pd.manifest.getWorldview())), + s -> new ArrayList<>()) + .add(Pair.of(pd.name, pd.manifest.getVersion())); + } + } + + for (var wv : wdeps.keySet()) { + dependencyGraph.addVertex(wv); + for (var dep : wdeps.get(wv)) { + dependencyGraph.addVertex(dep); + dependencyGraph.addEdge(dep, wv); + } + } + + // build a version-aware dependency tree + for (String s : projectDescriptors.keySet()) { + var snode = Pair.of(s, projectDescriptors.get(s).manifest.getVersion()); + dependencyGraph.addVertex(snode); + for (var dep : projectDescriptors.get(s).manifest.getPrerequisiteProjects()) { + var pnode = Pair.of(dep.getFirst(), dep.getSecond()); + dependencyGraph.addVertex(pnode); + dependencyGraph.addEdge(pnode, snode); + } + } + + CycleDetector, DefaultEdge> cycleDetector = + new CycleDetector<>(dependencyGraph); + if (cycleDetector.detectCycles()) { + scope.error( + Klab.ErrorCode.CIRCULAR_REFERENCES, + Klab.ErrorContext.PROJECT, + "Projects in " + + "configuration have cyclic dependencies on each other: " + + "will not " + + "proceed. 
" + + "Review " + + "configuration"); + this.loading.set(false); + return false; + } else { + + TopologicalOrderIterator, DefaultEdge> sort = + new TopologicalOrderIterator(dependencyGraph); + while (sort.hasNext()) { + var proj = sort.next(); + // verify availability + if (projectDescriptors.get(proj.getFirst()) != null) { + // local dependency: check version + var pd = projectDescriptors.get(proj.getFirst()); + if (pd.manifest.getVersion().compatible(proj.getSecond())) { + this._projectLoadOrder.add(Pair.of(pd.storage, null)); + } else { + scope.error( + Klab.ErrorContext.PROJECT, + Klab.ErrorCode.MISMATCHED_VERSION, + "Project " + + proj.getFirst() + + "@" + + proj.getSecond() + + " is required" + + " by " + + "other " + + "projects in workspace but incompatible version " + + pd.manifest.getVersion() + + " " + + "is available in local workspace"); + unresolvedProjects.add(proj); + } + } else { + var externalProject = externalProjectResolver.apply(proj.getFirst()); + if (externalProject != null) { + // check version + if (externalProject.getManifest().getVersion().compatible(proj.getSecond())) { + ProjectDescriptor descriptor = new ProjectDescriptor(); + descriptor.externalProject = externalProject; + descriptor.manifest = externalProject.getManifest(); + descriptor.workspace = null; + descriptor.name = proj.getFirst(); + projectDescriptors.put(proj.getFirst(), descriptor); + this._projectLoadOrder.add(Pair.of(null, externalProject)); + } else { + scope.error( + Klab.ErrorContext.PROJECT, + Klab.ErrorCode.MISMATCHED_VERSION, + "Project " + + proj.getFirst() + + "@" + + proj.getSecond() + + " is " + + "required by " + + "other projects in workspace but incompatible " + + "version " + + externalProject.getManifest().getVersion() + + " is " + + "available " + + "externally"); + unresolvedProjects.add(proj); } + } else { + scope.error( + Klab.ErrorContext.PROJECT, + Klab.ErrorCode.UNRESOLVED_REFERENCE, + "Project " + + proj.getFirst() + + "@" + + proj.getSecond() + + " is required" + + " " + + "by " + + "other projects in workspace but cannot be resolved from " + + "the " + + "network"); + unresolvedProjects.add(proj); + } } - return Version.ANY_VERSION; + } } - private ProjectStorage newProject(String projectName, String workspaceName) { - return null; + /* + we have workspaces and project descriptors; load ontologies and namespaces + */ + for (var ontology : getOntologies(false)) { + replaceAndIndex(ontology); } - - public SemanticSyntax resolveConcept(String conceptDefinition) { - return this.observableParser.parseConcept(conceptDefinition); + for (var namespace : getNamespaces()) { + replaceAndIndex(namespace); } - public ObservableSyntax resolveObservable(String observableDefinition) { - return this.observableParser.parseObservable(observableDefinition); - } + // TODO behaviors - public boolean removeProject(String projectName) { - ResourcesConfiguration.ProjectConfiguration configuration = - this.configuration.getProjectConfiguration().get(projectName); - var project = this.projectDescriptors.remove(projectName); - if (project != null && project.storage != null) { - Workspace workspace = getWorkspace(project.workspace); - Utils.Files.deleteQuietly(configuration.getLocalPath()); - if (this.configuration.getWorkspaces().get(project.workspace) != null) { - this.configuration.getWorkspaces().get(project.workspace).remove(projectName); + // build workspace and project descriptors and attribute all namespaces + for (var proj : this._projectLoadOrder) { + if (proj.getFirst() != null) { + var 
pdesc = projectDescriptors.get(proj.getFirst().getProjectName()); + if (pdesc != null && pdesc.storage != null) { + WorkspaceImpl ws = null; + if (pdesc.workspace != null) { + ws = this.workspaces.get(pdesc.workspace); + if (ws == null) { + ws = new WorkspaceImpl(); + ws.setUrn(pdesc.workspace); + this.workspaces.put(pdesc.workspace, ws); + } + } + var project = createProjectData(pdesc.name, pdesc.workspace); + this.projects.put(pdesc.name, project); + if (ws != null) { + if (ws.getProjects().stream().anyMatch(p -> p.getUrn().equals(project.getUrn()))) { + var newProjects = + ws.getProjects().stream() + .map(p -> p.getUrn().equals(project.getUrn()) ? project : p) + .toList(); + ws.getProjects().clear(); + ws.getProjects().addAll(newProjects); + } else { + ws.getProjects().add(project); } - workspace.getProjects().remove(project.externalProject); - saveConfiguration(); + } } - - // rebuild all - loadWorkspace(); - - return true; - } - - private Project.Manifest readManifest(ProjectStorage project) { - return Utils.Json.load(project.listResources(ProjectStorage.ResourceType.MANIFEST).getFirst(), - ProjectImpl.ManifestImpl.class); + } } - public WorkspaceImpl getWorkspace(String workspaceName) { - return updateStatus(this.workspaces.get(workspaceName)); - } + this.loading.set(false); - public Collection getWorkspaces() { - List ret = new ArrayList<>(); - for (var wsId : configuration.getWorkspaces().keySet()) { - var workspace = getWorkspace(wsId); - ret.add(workspace); - } - return ret; - } + return true; + } - public List> getUnresolvedProjects() { - return unresolvedProjects; + private Version getWorldviewVersion(String worldview) { + for (var pd : projectDescriptors.values()) { + if (worldview.equals(pd.manifest.getDefinedWorldview())) { + return pd.manifest.getVersion(); + } } + return Version.ANY_VERSION; + } + private ProjectStorage newProject(String projectName, String workspaceName) { + return null; + } - private abstract static class Parser { + public SemanticSyntax resolveConcept(String conceptDefinition) { + return this.observableParser.parseConcept(conceptDefinition); + } - @Inject - protected IParser parser; + public ObservableSyntax resolveObservable(String observableDefinition) { + return this.observableParser.parseObservable(observableDefinition); + } - public Parser() { - createInjector().injectMembers(this); - } + public boolean removeProject(String projectName) { + ResourcesConfiguration.ProjectConfiguration configuration = + this.configuration.getProjectConfiguration().get(projectName); + var project = this.projectDescriptors.remove(projectName); + if (project != null && project.storage != null) { + Workspace workspace = getWorkspace(project.workspace); + Utils.Files.deleteQuietly(configuration.getLocalPath()); + if (this.configuration.getWorkspaces().get(project.workspace) != null) { + this.configuration.getWorkspaces().get(project.workspace).remove(projectName); + } + workspace.getProjects().remove(project.externalProject); + saveConfiguration(); + } - protected abstract Injector createInjector(); + // rebuild all + loadWorkspace(); - public T parse(InputStream input, List errors) { - return parse(new InputStreamReader(input, StandardCharsets.UTF_8), errors); - } + return true; + } - /** - * Parses data provided by an input reader using Xtext and returns the root node of the resulting - * object tree. 
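The abstract Parser above has its IParser injected by asking a subclass-supplied Injector to populate its own fields from the constructor. The same trick is shown in self-contained form below; Greeter and GreeterModule are invented for the example, whereas in the real subclasses the Injector would come from the Xtext standalone setup of the corresponding grammar.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;

// Self-contained illustration of the "inject myself in the constructor" pattern:
// the abstract base asks the subclass for an Injector and has it fill the @Inject fields.
class SelfInjectingSketch {

  interface Greeter { String greet(String who); }

  static class GreeterModule extends AbstractModule {
    @Override protected void configure() {
      bind(Greeter.class).toInstance(who -> "hello " + who);
    }
  }

  abstract static class Injected {
    @Inject protected Greeter greeter;
    Injected() { createInjector().injectMembers(this); } // same trick as Parser()
    protected abstract Injector createInjector();
  }

  static class MyInjected extends Injected {
    @Override protected Injector createInjector() {
      return Guice.createInjector(new GreeterModule());
    }
  }

  public static void main(String[] args) {
    System.out.println(new MyInjected().greeter.greet("k.LAB"));
  }
}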
- * - * @param reader Input reader - * @return root object node - * @throws IOException when errors occur during the parsing process - */ - public T parse(Reader reader, List errors) { - try { - IParseResult result = parser.parse(reader); - for (var error : result.getSyntaxErrors()) { - System.out.println(error); - // TODO syntax context - errors.add(Notification.create(error.getSyntaxErrorMessage().getMessage(), - Notification.Level.Error)); - } - return (T) result.getRootASTElement(); - } catch (Throwable throwable) { - errors.add(Notification.create(throwable)); - } - return null; - } - } + private Project.Manifest readManifest(ProjectStorage project) { + return Utils.Json.load( + project.listResources(ProjectStorage.ResourceType.MANIFEST).getFirst(), + ProjectImpl.ManifestImpl.class); + } - public Worldview getWorldview() { + public WorkspaceImpl getWorkspace(String workspaceName) { + return updateStatus(this.workspaces.get(workspaceName)); + } - if (_worldview == null) { + public Collection getWorkspaces() { + List ret = new ArrayList<>(); + for (var wsId : configuration.getWorkspaces().keySet()) { + var workspace = getWorkspace(wsId); + ret.add(workspace); + } + return ret; + } - _worldview = new WorldviewImpl(); - _worldview.getOntologies().addAll(getOntologies(true)); - // basic validations: non-empty, first must be root, take the worldview name from it - // go back to the projects and load all observation strategies, adding project metadata - for (var pd : projectDescriptors.values()) { - if (pd.manifest.getDefinedWorldview() == null) { - continue; - } - if (pd.externalProject != null) { - for (var strategy : pd.externalProject.getObservationStrategies()) { - _worldview.getObservationStrategies().add(strategy); - } - } else { - for (var strategyUrl : pd.storage.listResources(ProjectStorage.ResourceType.STRATEGY)) { - var parsed = strategyParser.parseStrategies(strategyUrl, pd.name); - if (parsed == null) { - _worldview.setEmpty(true); - return _worldview; - } - _worldview.getObservationStrategies().add(LanguageAdapter.INSTANCE.adaptStrategies(parsed, pd.name, List.of())); - } - } - } - } + public List> getUnresolvedProjects() { + return unresolvedProjects; + } - /* - Validate the first ontology as the root ontology and set the worldview name from it - */ - if (!_worldview.getOntologies().isEmpty()) { + private abstract static class Parser { - for (var ontology : _worldview.getOntologies()) { - if (Utils.Notifications.hasErrors(ontology.getNotifications())) { - _worldview.setEmpty(true); - scope.error("Namespace " + ontology.getUrn() + " has fatal errors: worldview " + "is " + "inconsistent"); - } - } + @Inject protected IParser parser; - KimOntology root = _worldview.getOntologies().get(0); - if (!(root.getDomain() == KimOntology.rootDomain)) { - _worldview.setEmpty(true); - scope.error("The first namespace in the worldview is not the root namespace: worldview " + - "is inconsistent"); - } else { - _worldview.setUrn(root.getUrn()); - } - } else { - _worldview.setEmpty(true); - } - - return _worldview; + public Parser() { + createInjector().injectMembers(this); } - private void saveConfiguration() { - File config = BaseService.getFileInConfigurationDirectory(startupOptions, "resources.yaml"); - org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + protected abstract Injector createInjector(); + + public T parse(InputStream input, List errors) { + return parse(new InputStreamReader(input, StandardCharsets.UTF_8), errors); } /** - * TODO this one MUST 
ALSO update the BUILD number in the version, so that whoever uses this can see the - * changes. and reload. The build number only applies to the document and is not saved with the project. - * @param projectName - * @param documentType - * @param contents - * @param lockingScope - * @return + * Parses data provided by an input reader using Xtext and returns the root node of the + * resulting object tree. + * + * @param reader Input reader + * @return root object node + * @throws IOException when errors occur during the parsing process */ - public List updateDocument(String projectName, ProjectStorage.ResourceType documentType, - String contents, Scope lockingScope) { - - String lockingAuthorization = lockingScope.getIdentity().getId(); - List ret = new ArrayList<>(); - RepositoryState repositoryState = null; - - if (lockingAuthorization == null || !lockingAuthorization.equals(projectLocks.get(projectName))) { - throw new KlabAuthorizationException("cannot update project " + projectName + " without " + - "locking" + " it first"); - } - - var pd = projectDescriptors.get(projectName); - if (pd == null || !(pd.storage instanceof FileProjectStorage)) { - throw new KlabIllegalStateException("Cannot update a document that is not stored on the " + - "service's filesystem"); + public T parse(Reader reader, List errors) { + try { + IParseResult result = parser.parse(reader); + for (var error : result.getSyntaxErrors()) { + System.out.println(error); + // TODO syntax context + errors.add( + Notification.create( + error.getSyntaxErrorMessage().getMessage(), Notification.Level.Error)); + } + return (T) result.getRootASTElement(); + } catch (Throwable throwable) { + errors.add(Notification.create(throwable)); + } + return null; + } + } + + public Worldview getWorldview() { + + if (_worldview == null) { + + _worldview = new WorldviewImpl(); + _worldview.getOntologies().addAll(getOntologies(true)); + // basic validations: non-empty, first must be root, take the worldview name from it + // go back to the projects and load all observation strategies, adding project metadata + for (var pd : projectDescriptors.values()) { + if (pd.manifest.getDefinedWorldview() == null) { + continue; + } + if (pd.externalProject != null) { + for (var strategy : pd.externalProject.getObservationStrategies()) { + _worldview.getObservationStrategies().add(strategy); + } + } else { + for (var strategyUrl : pd.storage.listResources(ProjectStorage.ResourceType.STRATEGY)) { + var parsed = strategyParser.parseStrategies(strategyUrl, pd.name); + if (parsed == null) { + _worldview.setEmpty(true); + return _worldview; + } + _worldview + .getObservationStrategies() + .add(LanguageAdapter.INSTANCE.adaptStrategies(parsed, pd.name, List.of())); + } } + } + } - /* - file storage: modify as specified - */ - List notifications = new ArrayList<>(); - var parsed = switch (documentType) { - case ONTOLOGY -> - ontologyParser.parse(new StringReader(contents), notifications).getNamespace().getName(); - case MODEL_NAMESPACE -> - namespaceParser.parse(new StringReader(contents), notifications).getNamespace().getName(); - // case BEHAVIOR-> null; // TODO - case STRATEGY -> - strategyParser.parse(new StringReader(contents), notifications).getPreamble().getName(); - default -> throw new KlabUnimplementedException("parsing new " + documentType); + /* + Validate the first ontology as the root ontology and set the worldview name from it + */ + if (!_worldview.getOntologies().isEmpty()) { + + for (var ontology : _worldview.getOntologies()) { + if 
(Utils.Notifications.hasErrors(ontology.getNotifications())) { + _worldview.setEmpty(true); + scope.error( + "Namespace " + + ontology.getUrn() + + " has fatal errors: worldview " + + "is " + + "inconsistent"); + } + } + + KimOntology root = _worldview.getOntologies().get(0); + if (!(root.getDomain() == KimOntology.rootDomain)) { + _worldview.setEmpty(true); + scope.error( + "The first namespace in the worldview is not the root namespace: worldview " + + "is inconsistent"); + } else { + _worldview.setUrn(root.getUrn()); + } + } else { + _worldview.setEmpty(true); + } + + return _worldview; + } + + private void saveConfiguration() { + File config = BaseService.getFileInConfigurationDirectory(startupOptions, "resources.yaml"); + org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + } + + /** + * TODO this one MUST ALSO update the BUILD number in the version, so that whoever uses this can + * see the changes. and reload. The build number only applies to the document and is not saved + * with the project. + * + * @param projectName + * @param documentType + * @param contents + * @param lockingScope + * @return + */ + public List updateDocument( + String projectName, + ProjectStorage.ResourceType documentType, + String contents, + Scope lockingScope) { + + String lockingAuthorization = lockingScope.getIdentity().getId(); + List ret = new ArrayList<>(); + RepositoryState repositoryState = null; + + if (lockingAuthorization == null + || !lockingAuthorization.equals(projectLocks.get(projectName))) { + throw new KlabAuthorizationException( + "cannot update project " + projectName + " without " + "locking" + " it first"); + } + + var pd = projectDescriptors.get(projectName); + if (pd == null || !(pd.storage instanceof FileProjectStorage)) { + throw new KlabIllegalStateException( + "Cannot update a document that is not stored on the " + "service's filesystem"); + } + + /* + file storage: modify as specified + */ + List notifications = new ArrayList<>(); + var parsed = + switch (documentType) { + case ONTOLOGY -> + ontologyParser + .parse(new StringReader(contents), notifications) + .getNamespace() + .getName(); + case MODEL_NAMESPACE -> + namespaceParser + .parse(new StringReader(contents), notifications) + .getNamespace() + .getName(); + // case BEHAVIOR-> null; // TODO + case STRATEGY -> + strategyParser + .parse(new StringReader(contents), notifications) + .getPreamble() + .getName(); + default -> throw new KlabUnimplementedException("parsing new " + documentType); }; - if (parsed != null && pd.storage instanceof FileProjectStorage fileProjectStorage) { + if (parsed != null && pd.storage instanceof FileProjectStorage fileProjectStorage) { - // do the update in the stored project and screw it - var url = fileProjectStorage.update(documentType, parsed, contents); + // do the update in the stored project and screw it + var url = fileProjectStorage.update(documentType, parsed, contents); - ret = handleFileChange(projectName, List.of(Triple.of(documentType, CRUDOperation.UPDATE, url))); + ret = + handleFileChange( + projectName, List.of(Triple.of(documentType, CRUDOperation.UPDATE, url))); - repositoryState = fileProjectStorage.getRepositoryState(); - - } - - if (repositoryState != null) { - for (var result : ret) { - var projectResource = new ResourceSet.Resource(); - projectResource.setResourceVersion(pd.manifest.getVersion()); - projectResource.setProjectUrn(pd.name); - projectResource.setResourceUrn(pd.name); - projectResource.setRepositoryState(repositoryState); - 
projectResource.setKnowledgeClass(KlabAsset.KnowledgeClass.PROJECT); - result.getProjects().add(projectResource); - } - } + repositoryState = fileProjectStorage.getRepositoryState(); + } - return ret; + if (repositoryState != null) { + for (var result : ret) { + var projectResource = new ResourceSet.Resource(); + projectResource.setResourceVersion(pd.manifest.getVersion()); + projectResource.setProjectUrn(pd.name); + projectResource.setResourceUrn(pd.name); + projectResource.setRepositoryState(repositoryState); + projectResource.setKnowledgeClass(KlabAsset.KnowledgeClass.PROJECT); + result.getProjects().add(projectResource); + } } - public List createDocument(String projectName, ProjectStorage.ResourceType documentType, - String documentUrn, Scope lockingScope) { + return ret; + } - List ret = new ArrayList<>(); - String lockingAuthorization = scope.getIdentity().getId(); + public List createDocument( + String projectName, + ProjectStorage.ResourceType documentType, + String documentUrn, + Scope lockingScope) { - if (lockingAuthorization == null || !lockingAuthorization.equals(projectLocks.get(projectName))) { - throw new KlabAuthorizationException("cannot update project " + projectName + " without " + - "locking" + " it first"); - } + List ret = new ArrayList<>(); + String lockingAuthorization = scope.getIdentity().getId(); - var pd = projectDescriptors.get(projectName); - if (pd == null || !(pd.storage instanceof FileProjectStorage fileProjectStorage)) { - throw new KlabIllegalStateException("Cannot create a document within a project not stored " + - "on " + "the " + "service's filesystem"); - } + if (lockingAuthorization == null + || !lockingAuthorization.equals(projectLocks.get(projectName))) { + throw new KlabAuthorizationException( + "cannot update project " + projectName + " without " + "locking" + " it first"); + } - var document = fileProjectStorage.create(documentUrn, documentType); - if (document != null) { - return handleFileChange(projectName, List.of(Triple.of(documentType, CRUDOperation.CREATE, - document))); - } - return ret; + var pd = projectDescriptors.get(projectName); + if (pd == null || !(pd.storage instanceof FileProjectStorage fileProjectStorage)) { + throw new KlabIllegalStateException( + "Cannot create a document within a project not stored " + + "on " + + "the " + + "service's filesystem"); } + var document = fileProjectStorage.create(documentUrn, documentType); + if (document != null) { + return handleFileChange( + projectName, List.of(Triple.of(documentType, CRUDOperation.CREATE, document))); + } + return ret; + } } diff --git a/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerContextController.java b/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerContextController.java index 0b79ffe17..c54b93a4f 100644 --- a/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerContextController.java +++ b/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerContextController.java @@ -21,44 +21,40 @@ import java.util.List; /** - * Provides the GraphQL support for context access, modification and inspection. The GraphQL endpoint is - * unsecured, but the result depend on the authorized identity and scope. + * Provides the GraphQL support for context access, modification and inspection. 
The GraphQL + * endpoint is unsecured, but the result depend on the authorized identity and scope. */ @Controller public class RuntimeServerContextController { - @Autowired - private RuntimeServer runtimeService; - @Autowired - ServiceAuthorizationManager authorizationManager; - @Autowired - private HttpServletRequest request; - - /** - * Return the authorization, including the scope referenced in the request. - * - * @return - */ - private EngineAuthorization getAuthorization() { - var authHeader = request.getHeader(HttpHeaders.AUTHORIZATION); - var observerToken = request.getHeader(ServicesAPI.SCOPE_HEADER); - var serverKey = request.getHeader(ServicesAPI.SERVER_KEY_HEADER); - return authorizationManager.validateToken(authHeader, serverKey, observerToken); - } - - @QueryMapping - public List observations() { - return List.of(); - } - - @QueryMapping - public List tasks() { - return List.of(); - } - - @QueryMapping - public List notifications(@Argument(name = "after") float after) { - return List.of(); - } - + @Autowired private RuntimeServer runtimeService; + @Autowired ServiceAuthorizationManager authorizationManager; + @Autowired private HttpServletRequest request; + + /** + * Return the authorization, including the scope referenced in the request. + * + * @return + */ + private EngineAuthorization getAuthorization() { + var authHeader = request.getHeader(HttpHeaders.AUTHORIZATION); + var observerToken = request.getHeader(ServicesAPI.SCOPE_HEADER); + var serverKey = request.getHeader(ServicesAPI.SERVER_KEY_HEADER); + return authorizationManager.validateToken(authHeader, serverKey, observerToken); + } + + @QueryMapping + public List observations() { + return List.of(); + } + + @QueryMapping + public List tasks() { + return List.of(); + } + + @QueryMapping + public List notifications(@Argument(name = "after") float after) { + return List.of(); + } } diff --git a/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerController.java b/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerController.java index 6885a6d16..5676233ce 100644 --- a/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerController.java +++ b/klab.services.runtime.server/src/main/java/org/integratedmodelling/klab/services/runtime/server/controllers/RuntimeServerController.java @@ -27,81 +27,96 @@ @Secured(Role.USER) public class RuntimeServerController { - @Autowired - private RuntimeServer runtimeService; + @Autowired private RuntimeServer runtimeService; - /** - * Observations are set into the digital twin by the context after creating them in an unresolved state. - * The return long ID is the handle to the resolution; according to the messaging protocol, the - * observation tasks should monitor resolution until completion. 
- * - * @return - */ - @PostMapping(ServicesAPI.RUNTIME.SUBMIT_OBSERVATION) - public @ResponseBody long observe(@RequestBody ResolutionRequest resolutionRequest, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var contextScope = - authorization.getScope(ContextScope.class).withResolutionConstraints(resolutionRequest.getResolutionConstraints().toArray(new ResolutionConstraint[0])); - if (contextScope instanceof ServiceContextScope serviceContextScope) { - var agent = - serviceContextScope.getDigitalTwin().knowledgeGraph().requireAgent(resolutionRequest.getAgentName()); - var scope = serviceContextScope - .withResolutionConstraints(ResolutionConstraint.of(ResolutionConstraint.Type.Provenance, agent)); - return runtimeService.klabService().submit(resolutionRequest.getObservation(), scope); - } - } - throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + /** + * Observations are set into the digital twin by the context after creating them in an unresolved + * state. The return long ID is the handle to the resolution; according to the messaging protocol, + * the observation tasks should monitor resolution until completion. + * + * @return + */ + @PostMapping(ServicesAPI.RUNTIME.SUBMIT_OBSERVATION) + public @ResponseBody long observe( + @RequestBody ResolutionRequest resolutionRequest, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var contextScope = + authorization + .getScope(ContextScope.class) + .withResolutionConstraints( + resolutionRequest + .getResolutionConstraints() + .toArray(new ResolutionConstraint[0])); + if (contextScope instanceof ServiceContextScope serviceContextScope) { + var agent = + serviceContextScope + .getDigitalTwin() + .knowledgeGraph() + .requireAgent(resolutionRequest.getAgentName()); + var scope = + serviceContextScope.withResolutionConstraints( + ResolutionConstraint.of(ResolutionConstraint.Type.Provenance, agent)); + return runtimeService.klabService().submit(resolutionRequest.getObservation(), scope); + } } + throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + } - @PostMapping(ServicesAPI.RUNTIME.START_RESOLUTION) - public @ResponseBody String startResolution(@RequestBody ResolutionRequest request, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var contextScope = - authorization.getScope(ContextScope.class).withResolutionConstraints(request.getResolutionConstraints().toArray(new ResolutionConstraint[0])); - if (contextScope instanceof ServiceContextScope serviceContextScope) { + @PostMapping(ServicesAPI.RUNTIME.START_RESOLUTION) + public @ResponseBody String startResolution( + @RequestBody ResolutionRequest request, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var contextScope = + authorization + .getScope(ContextScope.class) + .withResolutionConstraints( + request.getResolutionConstraints().toArray(new ResolutionConstraint[0])); + if (contextScope instanceof ServiceContextScope serviceContextScope) { - var observation = serviceContextScope.getObservation(request.getObservationId()); - runtimeService.klabService().resolve(observation.getId(), serviceContextScope); - return observation.getUrn(); - } - } - throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + var observation = serviceContextScope.getObservation(request.getObservationId()); + 
runtimeService.klabService().resolve(observation.getId(), serviceContextScope); + return observation.getUrn(); + } } + throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + } - @GetMapping(ServicesAPI.RUNTIME.GET_SESSION_INFO) - public @ResponseBody List getSessionInfo(Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - return runtimeService.klabService().getSessionInfo(authorization.getScope()); - } - return List.of(); + @GetMapping(ServicesAPI.RUNTIME.GET_SESSION_INFO) + public @ResponseBody List getSessionInfo(Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + return runtimeService.klabService().getSessionInfo(authorization.getScope()); } + return List.of(); + } - @PostMapping(ServicesAPI.RUNTIME.RETRIEVE_ASSET) - public @ResponseBody List queryKnowledgeGraph(@RequestBody AssetRequest request, - Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var contextScope = - authorization.getScope(ContextScope.class); - List queryParameters = new ArrayList<>(); - if (request.getId() != Observation.UNASSIGNED_ID) queryParameters.add(request.getId()); - if (request.getObservable() != null) queryParameters.add(request.getObservable()); - if (request.getGeometry() != null) queryParameters.add(request.getGeometry()); - if (request.getContextObservation() != null) queryParameters.add(request.getContextObservation()); - if (!request.getMetadata().isEmpty()) queryParameters.add(request.getMetadata()); - if (request.getName() != null) queryParameters.add(request.getName()); - return runtimeService.klabService().retrieveAssets(contextScope, - request.getKnowledgeClass().assetClass, queryParameters.toArray()); - } - throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + @PostMapping(ServicesAPI.RUNTIME.RETRIEVE_ASSET) + public @ResponseBody List queryKnowledgeGraph( + @RequestBody AssetRequest request, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var contextScope = authorization.getScope(ContextScope.class); + List queryParameters = new ArrayList<>(); + if (request.getId() != Observation.UNASSIGNED_ID) queryParameters.add(request.getId()); + if (request.getObservable() != null) queryParameters.add(request.getObservable()); + if (request.getGeometry() != null) queryParameters.add(request.getGeometry()); + if (request.getContextObservation() != null) + queryParameters.add(request.getContextObservation()); + if (!request.getMetadata().isEmpty()) queryParameters.add(request.getMetadata()); + if (request.getName() != null) queryParameters.add(request.getName()); + return runtimeService + .klabService() + .retrieveAssets( + contextScope, request.getKnowledgeClass().assetClass, queryParameters.toArray()); } + throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + } - @PostMapping(ServicesAPI.RUNTIME.RESOLVE_CONTEXTUALIZERS) - public @ResponseBody ResourceSet resolveContextualizers(@RequestBody List contextualizables, Principal principal) { - if (principal instanceof EngineAuthorization authorization) { - var contextScope = - authorization.getScope(ContextScope.class); - return runtimeService.klabService().resolveContextualizables(contextualizables, contextScope); - } - throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + @PostMapping(ServicesAPI.RUNTIME.RESOLVE_CONTEXTUALIZERS) + public @ResponseBody 
ResourceSet resolveContextualizers( + @RequestBody List contextualizables, Principal principal) { + if (principal instanceof EngineAuthorization authorization) { + var contextScope = authorization.getScope(ContextScope.class); + return runtimeService.klabService().resolveContextualizables(contextualizables, contextScope); } + throw new KlabInternalErrorException("Unexpected implementation of request authorization"); + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ExecutionSequence.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ExecutionSequence.java index 1b7698f28..0a55b824d 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ExecutionSequence.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ExecutionSequence.java @@ -44,414 +44,438 @@ import java.util.function.Supplier; /** - * Object that follows the execution of the actuators. Each run produces a new context that is the one for the - * next execution. + * Object that follows the execution of the actuators. Each run produces a new context that is the + * one for the next execution. */ public class ExecutionSequence { - private final ServiceContextScope scope; - private final DigitalTwin digitalTwin; - private final ComponentRegistry componentRegistry; - private final double resolvedCoverage; - private final KnowledgeGraph.Operation contextualization; - private final Dataflow dataflow; - private List> sequence = new ArrayList<>(); - private boolean empty; - // the context for the next operation. Starts at the observation and doesn't normally change but - // implementations - // may change it when they return a non-null, non-POD object. - // TODO check if this should be a RuntimeAsset or even an Observation. - private Object currentExecutionContext; - private Map operations = new HashMap<>(); - private Throwable cause; - - public ExecutionSequence(KnowledgeGraph.Operation contextualization, Dataflow dataflow, - ComponentRegistry componentRegistry, ServiceContextScope contextScope) { - this.scope = contextScope; - this.contextualization = contextualization; - this.resolvedCoverage = dataflow instanceof DataflowImpl dataflow1 ? - dataflow1.getResolvedCoverage() : 1.0; - this.componentRegistry = componentRegistry; - this.dataflow = dataflow; - this.digitalTwin = contextScope.getDigitalTwin(); - } - - public boolean compile(Actuator rootActuator) { - - var pairs = sortComputation(rootActuator); - List current = null; - int currentGroup = -1; - for (var pair : pairs) { - if (currentGroup != pair.getSecond()) { - if (current != null) { - sequence.add(current); - } - current = new ArrayList<>(); - } - currentGroup = pair.getSecond(); - current.add(new ExecutorOperation(pair.getFirst())); - } - + private final ServiceContextScope scope; + private final DigitalTwin digitalTwin; + private final ComponentRegistry componentRegistry; + private final double resolvedCoverage; + private final KnowledgeGraph.Operation contextualization; + private final Dataflow dataflow; + private List> sequence = new ArrayList<>(); + private boolean empty; + // the context for the next operation. Starts at the observation and doesn't normally change but + // implementations + // may change it when they return a non-null, non-POD object. + // TODO check if this should be a RuntimeAsset or even an Observation. 
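The compile()/sortComputation() pair in ExecutionSequence above turns the dependency-sorted actuators into groups that execute sequentially, with the members of each group eligible to run in parallel. A minimal, self-contained sketch of that grouping idea, using JGraphT as the dataflow code does; the class and node names here are illustrative, not k.LAB API:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

class ExecutionBatchingSketch {

  // Walk the dependency graph in topological order; start a new sequential batch whenever an
  // item depends on something already placed in the current batch.
  static List<List<String>> batches(Graph<String, DefaultEdge> deps) {
    List<List<String>> result = new ArrayList<>();
    List<String> current = new ArrayList<>();
    Set<String> currentGroup = new HashSet<>();
    new TopologicalOrderIterator<>(deps)
        .forEachRemaining(
            step -> {
              boolean dependsOnGroup =
                  deps.incomingEdgesOf(step).stream()
                      .anyMatch(edge -> currentGroup.contains(deps.getEdgeSource(edge)));
              if (dependsOnGroup) { // close the batch: this step must run after it
                result.add(new ArrayList<>(current));
                current.clear();
                currentGroup.clear();
              }
              current.add(step);
              currentGroup.add(step);
            });
    if (!current.isEmpty()) {
      result.add(current);
    }
    return result;
  }

  public static void main(String[] args) {
    Graph<String, DefaultEdge> g = new DefaultDirectedGraph<>(DefaultEdge.class);
    List.of("a", "b", "c").forEach(g::addVertex);
    g.addEdge("a", "c"); // edge goes dependency -> dependent, as in loadGraph() above
    g.addEdge("b", "c");
    System.out.println(batches(g)); // e.g. [[a, b], [c]]: a and b in parallel, then c
  }
}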
+ private Object currentExecutionContext; + private Map operations = new HashMap<>(); + private Throwable cause; + + public ExecutionSequence( + KnowledgeGraph.Operation contextualization, + Dataflow dataflow, + ComponentRegistry componentRegistry, + ServiceContextScope contextScope) { + this.scope = contextScope; + this.contextualization = contextualization; + this.resolvedCoverage = + dataflow instanceof DataflowImpl dataflow1 ? dataflow1.getResolvedCoverage() : 1.0; + this.componentRegistry = componentRegistry; + this.dataflow = dataflow; + this.digitalTwin = contextScope.getDigitalTwin(); + } + + public boolean compile(Actuator rootActuator) { + + var pairs = sortComputation(rootActuator); + List current = null; + int currentGroup = -1; + for (var pair : pairs) { + if (currentGroup != pair.getSecond()) { if (current != null) { - sequence.add(current); - return true; + sequence.add(current); } + current = new ArrayList<>(); + } + currentGroup = pair.getSecond(); + current.add(new ExecutorOperation(pair.getFirst())); + } - return false; + if (current != null) { + sequence.add(current); + return true; } - public boolean run() { + return false; + } - for (var operationGroup : sequence) { - // groups are sequential; grouped items are parallel. Empty groups are currently possible although - // they should be filtered out, but we leave them for completeness for now as they don't really - // bother anyone. - if (operationGroup.size() == 1) { - if (!operationGroup.getFirst().run()) { - return false; - } - } + public boolean run() { - /* - * Run also the empty operations because execution will update the observations - */ - if (scope.getParallelism() == Parallelism.ONE) { - for (var operation : operationGroup) { - if (!operation.run()) { - - return false; - } - } - } else { - try (ExecutorService taskExecutor = Executors.newVirtualThreadPerTaskExecutor()) { - for (var operation : operationGroup) { - taskExecutor.execute(operation::run); - } - taskExecutor.shutdown(); - if (!taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) { - return false; - } - } catch (InterruptedException e) { - this.cause = e; - scope.error(e); - } - } + for (var operationGroup : sequence) { + // groups are sequential; grouped items are parallel. Empty groups are currently possible + // although + // they should be filtered out, but we leave them for completeness for now as they don't + // really + // bother anyone. + if (operationGroup.size() == 1) { + if (!operationGroup.getFirst().run()) { + return false; } + } - return true; - } + /* + * Run also the empty operations because execution will update the observations + */ + if (scope.getParallelism() == Parallelism.ONE) { + for (var operation : operationGroup) { + if (!operation.run()) { - /** - * One operation per observation. Successful execution will update the observation in the DT. 
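The run() method above fans each operation group out on virtual threads and waits for the whole group to finish before moving to the next one. A hedged, stand-alone sketch of that pattern on Java 21, with a plain Runnable list standing in for the ExecutorOperation group:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

class ParallelGroupSketch {

  public static void main(String[] args) throws InterruptedException {
    List<Runnable> group =
        List.of(() -> System.out.println("operation 1"), () -> System.out.println("operation 2"));
    // one virtual thread per operation; the group is done only when every operation has finished
    try (ExecutorService executor = Executors.newVirtualThreadPerTaskExecutor()) {
      group.forEach(executor::execute);
      executor.shutdown();
      if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) {
        System.out.println("group did not terminate");
      }
    }
  }
}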
- */ - class ExecutorOperation { - - private final long id; - private final Observation observation; - protected List> executors = new ArrayList<>(); - private boolean scalar; - private KnowledgeGraph.Operation operation; - - public ExecutorOperation(Actuator actuator) { - this.id = actuator.getId(); - this.operation = operations.get(actuator); - this.observation = scope.getObservation(this.id); - compile(actuator); + return false; + } } + } else { + try (ExecutorService taskExecutor = Executors.newVirtualThreadPerTaskExecutor()) { + for (var operation : operationGroup) { + taskExecutor.execute(operation::run); + } + taskExecutor.shutdown(); + if (!taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) { + return false; + } + } catch (InterruptedException e) { + this.cause = e; + scope.error(e); + } + } + } - private void compile(Actuator actuator) { - - // TODO compile info for provenance from actuator - - ScalarMapper scalarMapper = null; + return true; + } - // TODO separate scalar calls into groups and compile them into one assembled functor - for (var call : actuator.getComputation()) { + /** One operation per observation. Successful execution will update the observation in the DT. */ + class ExecutorOperation { - var preset = RuntimeService.CoreFunctor.classify(call); - if (preset != null) { - switch (preset) { - case URN_RESOLVER -> { - } - case URN_INSTANTIATOR -> { - } - case EXPRESSION_RESOLVER -> { - } - case LUT_RESOLVER -> { - } - case CONSTANT_RESOLVER -> { - } - } - } + private final long id; + private final Observation observation; + protected List> executors = new ArrayList<>(); + private boolean scalar; + private KnowledgeGraph.Operation operation; - // TODO this should return a list of candidates, to match based on the parameters. For numeric there - // should be a float and double version. - var descriptor = componentRegistry.getFunctionDescriptor(call); - if (descriptor.serviceInfo.getGeometry().isScalar()) { + public ExecutorOperation(Actuator actuator) { + this.id = actuator.getId(); + this.operation = operations.get(actuator); + this.observation = scope.getObservation(this.id); + compile(actuator); + } - if (scalarMapper == null) { - scalarMapper = new ScalarMapper(observation, digitalTwin, scope); - } + private void compile(Actuator actuator) { - /** - * Executor is a class containing all consecutive steps in a single method and - * calling whatever mapping strategy is configured in the scope, using a different - * class per strategy. - */ - scalarMapper.add(call, descriptor); - - System.out.println("SCALAR"); - } else { - if (scalarMapper != null) { - // offload the scalar mapping to the executors - executors.add(scalarMapper::run); - scalarMapper = null; - } + // TODO compile info for provenance from actuator - var scale = Scale.create(observation.getGeometry()); - - // if we're a quality, we need storage at the discretion of the StorageManager. - Storage storage = observation.getObservable().is(SemanticType.QUALITY) ? - digitalTwin.stateStorage().getOrCreateStorage(observation, - Storage.class) : - null; - /* - * Create a runnable with matched parameters and have it set the context observation - * TODO allow multiple methods with same annotation, taking different storage - * implementations, enabling the storage manager to be configured for the wanted precision - * - * Should match arguments, check if they all match, and if not move to the next until - * no available implementations remain. 
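The compile() method above accumulates consecutive scalar calls into a single ScalarMapper and offloads it to the executor list as soon as a non-scalar step interrupts the run. A small sketch of that fusion strategy under illustrative names (Step and fuse are stand-ins, not the k.LAB ScalarMapper API):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

class ScalarFusionSketch {

  record Step(String name, boolean scalar) {
    boolean run() {
      System.out.println("running " + name);
      return true;
    }
  }

  static List<Supplier<Boolean>> fuse(List<Step> steps) {
    List<Supplier<Boolean>> executors = new ArrayList<>();
    List<Step> scalarRun = new ArrayList<>();
    for (Step step : steps) {
      if (step.scalar()) {
        scalarRun.add(step); // keep fusing consecutive scalar steps
      } else {
        flush(executors, scalarRun); // a non-scalar step interrupts the fused run
        executors.add(step::run);
      }
    }
    flush(executors, scalarRun);
    return executors;
  }

  private static void flush(List<Supplier<Boolean>> executors, List<Step> scalarRun) {
    if (scalarRun.isEmpty()) {
      return;
    }
    List<Step> fused = List.copyOf(scalarRun);
    scalarRun.clear();
    executors.add(() -> fused.stream().allMatch(Step::run)); // one executor for the whole run
  }

  public static void main(String[] args) {
    var executors =
        fuse(List.of(new Step("a", true), new Step("b", true), new Step("c", false)));
    executors.forEach(Supplier::get); // a and b run as one fused executor, then c
  }
}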
- */ - List runArguments = new ArrayList<>(); - if (descriptor.implementation().method != null) { - for (var argument : descriptor.implementation().method.getParameterTypes()) { - if (ContextScope.class.isAssignableFrom(argument)) { - // TODO consider wrapping into read-only delegating wrappers - runArguments.add(scope); - } else if (Scope.class.isAssignableFrom(argument)) { - runArguments.add(scope); - } else if (Observation.class.isAssignableFrom(argument)) { - runArguments.add(observation); - } else if (ServiceCall.class.isAssignableFrom(argument)) { - runArguments.add(call); - } else if (Parameters.class.isAssignableFrom(argument)) { - runArguments.add(call.getParameters()); - } else if (DoubleStorage.class.isAssignableFrom(argument)) { - storage = digitalTwin.stateStorage().promoteStorage(observation, storage, - DoubleStorage.class); - runArguments.add(storage); - } else if (FloatStorage.class.isAssignableFrom(argument)) { - storage = digitalTwin.stateStorage().promoteStorage(observation, storage, - DoubleStorage.class); - runArguments.add(storage); - } else if (BooleanStorage.class.isAssignableFrom(argument)) { - storage = digitalTwin.stateStorage().promoteStorage(observation, storage, - DoubleStorage.class); - runArguments.add(storage); - } else if (KeyedStorage.class.isAssignableFrom(argument)) { - storage = digitalTwin.stateStorage().promoteStorage(observation, storage, - DoubleStorage.class); - runArguments.add(storage); - } else if (Scale.class.isAssignableFrom(argument)) { - runArguments.add(scale); - } else if (Geometry.class.isAssignableFrom(argument)) { - runArguments.add(scale); - } else if (Observable.class.isAssignableFrom(argument)) { - runArguments.add(observation.getObservable()); - } else if (Space.class.isAssignableFrom(argument)) { - runArguments.add(scale.getSpace()); - } else if (Time.class.isAssignableFrom(argument)) { - runArguments.add(scale.getTime()); - } else { - scope.error("Cannot map argument of type " + argument.getCanonicalName() - + " to known objects in call to " + call.getUrn()); - runArguments.add(null); - } - } - - if (descriptor.staticMethod) { - executors.add(() -> { - try { - var context = descriptor.implementation().method.invoke(null, runArguments.toArray()); - setExecutionContext(context == null ? observation : context); - return true; - } catch (Exception e) { - cause = e; - scope.error(e /* TODO tracing parameters */); - } - return true; - }); - } else if (descriptor.implementation().mainClassInstance != null) { - executors.add(() -> { - try { - var context = descriptor.implementation().method.invoke(descriptor.implementation().mainClassInstance, - runArguments.toArray()); - setExecutionContext(context == null ? 
observation : context); - return true; - } catch (Exception e) { - cause = e; - scope.error(e /* TODO tracing parameters */); - } - return true; - }); - } - } - } - } + ScalarMapper scalarMapper = null; - if (scalarMapper != null) { - executors.add(scalarMapper::run); - } + // TODO separate scalar calls into groups and compile them into one assembled functor + for (var call : actuator.getComputation()) { + var preset = RuntimeService.CoreFunctor.classify(call); + if (preset != null) { + switch (preset) { + case URN_RESOLVER -> {} + case URN_INSTANTIATOR -> {} + case EXPRESSION_RESOLVER -> {} + case LUT_RESOLVER -> {} + case CONSTANT_RESOLVER -> {} + } } - public boolean run() { - - // TODO compile info for provenance, to be added to the KG at finalization - long start = System.currentTimeMillis(); - for (var executor : executors) { - if (!executor.get()) { - if (operation != null) { - operation.fail(scope, observation, cause); - } - return false; - } + // TODO this should return a list of candidates, to match based on the parameters. For + // numeric there + // should be a float and double version. + var descriptor = componentRegistry.getFunctionDescriptor(call); + if (descriptor.serviceInfo.getGeometry().isScalar()) { + + if (scalarMapper == null) { + scalarMapper = new ScalarMapper(observation, digitalTwin, scope); + } + + /** + * Executor is a class containing all consecutive steps in a single method and calling + * whatever mapping strategy is configured in the scope, using a different class per + * strategy. + */ + scalarMapper.add(call, descriptor); + + System.out.println("SCALAR"); + } else { + if (scalarMapper != null) { + // offload the scalar mapping to the executors + executors.add(scalarMapper::run); + scalarMapper = null; + } + + var scale = Scale.create(observation.getGeometry()); + + // if we're a quality, we need storage at the discretion of the StorageManager. + Storage storage = + observation.getObservable().is(SemanticType.QUALITY) + ? digitalTwin.stateStorage().getOrCreateStorage(observation, Storage.class) + : null; + /* + * Create a runnable with matched parameters and have it set the context observation + * TODO allow multiple methods with same annotation, taking different storage + * implementations, enabling the storage manager to be configured for the wanted precision + * + * Should match arguments, check if they all match, and if not move to the next until + * no available implementations remain. 
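The comment above describes matching a contextualizer method's declared parameters against the objects available in the scope, passing null when nothing fits. A minimal reflection sketch of that idea, assuming a generic candidate pool instead of the k.LAB-specific type switch shown in the diff; the method and class names are hypothetical:

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

class ArgumentMatcherSketch {

  // For each declared parameter type, pick the first available object it can accept;
  // fall back to null when no candidate matches, as the code above does.
  static Object[] matchArguments(Method method, List<Object> candidates) {
    List<Object> arguments = new ArrayList<>();
    for (Class<?> parameterType : method.getParameterTypes()) {
      Object matched = null;
      for (Object candidate : candidates) {
        if (candidate != null && parameterType.isAssignableFrom(candidate.getClass())) {
          matched = candidate;
          break;
        }
      }
      arguments.add(matched);
    }
    return arguments.toArray();
  }

  public static void example(String name, Integer value) {
    System.out.println(name + " " + value);
  }

  public static void main(String[] args) throws Exception {
    Method target = ArgumentMatcherSketch.class.getMethod("example", String.class, Integer.class);
    target.invoke(null, matchArguments(target, List.of("scope", 42))); // prints: scope 42
  }
}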
+ */ + List runArguments = new ArrayList<>(); + if (descriptor.implementation().method != null) { + for (var argument : descriptor.implementation().method.getParameterTypes()) { + if (ContextScope.class.isAssignableFrom(argument)) { + // TODO consider wrapping into read-only delegating wrappers + runArguments.add(scope); + } else if (Scope.class.isAssignableFrom(argument)) { + runArguments.add(scope); + } else if (Observation.class.isAssignableFrom(argument)) { + runArguments.add(observation); + } else if (ServiceCall.class.isAssignableFrom(argument)) { + runArguments.add(call); + } else if (Parameters.class.isAssignableFrom(argument)) { + runArguments.add(call.getParameters()); + } else if (DoubleStorage.class.isAssignableFrom(argument)) { + storage = + digitalTwin + .stateStorage() + .promoteStorage(observation, storage, DoubleStorage.class); + runArguments.add(storage); + } else if (FloatStorage.class.isAssignableFrom(argument)) { + storage = + digitalTwin + .stateStorage() + .promoteStorage(observation, storage, DoubleStorage.class); + runArguments.add(storage); + } else if (BooleanStorage.class.isAssignableFrom(argument)) { + storage = + digitalTwin + .stateStorage() + .promoteStorage(observation, storage, DoubleStorage.class); + runArguments.add(storage); + } else if (KeyedStorage.class.isAssignableFrom(argument)) { + storage = + digitalTwin + .stateStorage() + .promoteStorage(observation, storage, DoubleStorage.class); + runArguments.add(storage); + } else if (Scale.class.isAssignableFrom(argument)) { + runArguments.add(scale); + } else if (Geometry.class.isAssignableFrom(argument)) { + runArguments.add(scale); + } else if (Observable.class.isAssignableFrom(argument)) { + runArguments.add(observation.getObservable()); + } else if (Space.class.isAssignableFrom(argument)) { + runArguments.add(scale.getSpace()); + } else if (Time.class.isAssignableFrom(argument)) { + runArguments.add(scale.getTime()); + } else { + scope.error( + "Cannot map argument of type " + + argument.getCanonicalName() + + " to known objects in call to " + + call.getUrn()); + runArguments.add(null); + } } - long time = System.currentTimeMillis() - start; - - if (operation != null) { - operation.success(scope, observation, resolvedCoverage); + if (descriptor.staticMethod) { + executors.add( + () -> { + try { + var context = + descriptor.implementation().method.invoke(null, runArguments.toArray()); + setExecutionContext(context == null ? observation : context); + return true; + } catch (Exception e) { + cause = e; + scope.error(e /* TODO tracing parameters */); + } + return true; + }); + } else if (descriptor.implementation().mainClassInstance != null) { + executors.add( + () -> { + try { + var context = + descriptor + .implementation() + .method + .invoke( + descriptor.implementation().mainClassInstance, + runArguments.toArray()); + setExecutionContext(context == null ? 
observation : context); + return true; + } catch (Exception e) { + cause = e; + scope.error(e /* TODO tracing parameters */); + } + return true; + }); } - - return true; + } } - } + } - private void setExecutionContext(Object returnedValue) { - this.currentExecutionContext = returnedValue; + if (scalarMapper != null) { + executors.add(scalarMapper::run); + } } - public String statusLine() { - return "Execution terminated"; - } - - public Klab.ErrorCode errorCode() { - return Klab.ErrorCode.NO_ERROR; - } + public boolean run() { - public Klab.ErrorContext errorContext() { - return Klab.ErrorContext.RUNTIME; - } + // TODO compile info for provenance, to be added to the KG at finalization + long start = System.currentTimeMillis(); + for (var executor : executors) { + if (!executor.get()) { + if (operation != null) { + operation.fail(scope, observation, cause); + } + return false; + } + } - /** - * TODO this should be something recognized by the notification to fully describe the context of - * execution. - * - * @return - */ - public Object statusInfo() { - return null; - } + long time = System.currentTimeMillis() - start; - public boolean isEmpty() { - return this.empty; - } + if (operation != null) { + operation.success(scope, observation, resolvedCoverage); + } - public ExecutionSequence runActuator(Actuator actuator) { - return this; + return true; } - - - /** - * Establish the order of execution and the possible parallelism. Each root actuator should be sorted by - * dependency and appended in order to the result list along with its order of execution. Successive roots - * can refer to the previous roots but they must be executed sequentially. - *

- * The DigitalTwin is asked to register the actuator in the scope and prepare the environment and state - * for its execution, including defining its contextualization scale in context. - * - * @return - */ - private List> sortComputation(Actuator rootActuator) { - List> ret = new ArrayList<>(); - int executionOrder = 0; - Map branch = new HashMap<>(); - Set group = new HashSet<>(); - var dependencyGraph = computeActuatorOrder(rootActuator); - for (var nextActuator : ImmutableList.copyOf(new TopologicalOrderIterator<>(dependencyGraph))) { - if (nextActuator.getActuatorType() != Actuator.Type.REFERENCE) { - ret.add(Pair.of(nextActuator, (executionOrder = checkExecutionOrder - (executionOrder, nextActuator, dependencyGraph, group)))); - } - } - return ret; + } + + private void setExecutionContext(Object returnedValue) { + this.currentExecutionContext = returnedValue; + } + + public String statusLine() { + return "Execution terminated"; + } + + public Klab.ErrorCode errorCode() { + return Klab.ErrorCode.NO_ERROR; + } + + public Klab.ErrorContext errorContext() { + return Klab.ErrorContext.RUNTIME; + } + + /** + * TODO this should be something recognized by the notification to fully describe the context of + * execution. + * + * @return + */ + public Object statusInfo() { + return null; + } + + public boolean isEmpty() { + return this.empty; + } + + public ExecutionSequence runActuator(Actuator actuator) { + return this; + } + + /** + * Establish the order of execution and the possible parallelism. Each root actuator should be + * sorted by dependency and appended in order to the result list along with its order of + * execution. Successive roots can refer to the previous roots but they must be executed + * sequentially. + * + *

The DigitalTwin is asked to register the actuator in the scope and prepare the environment + * and state for its execution, including defining its contextualization scale in context. + * + * @return + */ + private List> sortComputation(Actuator rootActuator) { + List> ret = new ArrayList<>(); + int executionOrder = 0; + Map branch = new HashMap<>(); + Set group = new HashSet<>(); + var dependencyGraph = computeActuatorOrder(rootActuator); + for (var nextActuator : ImmutableList.copyOf(new TopologicalOrderIterator<>(dependencyGraph))) { + if (nextActuator.getActuatorType() != Actuator.Type.REFERENCE) { + ret.add( + Pair.of( + nextActuator, + (executionOrder = + checkExecutionOrder(executionOrder, nextActuator, dependencyGraph, group)))); + } } - - /** - * If the actuator depends on any in the currentGroup, empty the group and increment the order; otherwise, - * add it to the group and return the same order. - * - * @param executionOrder - * @param current - * @param dependencyGraph - * @param currentGroup - * @return - */ - private int checkExecutionOrder(int executionOrder, Actuator current, - Graph dependencyGraph, - Set currentGroup) { - boolean dependency = false; - for (Actuator previous : currentGroup) { - for (var edge : dependencyGraph.incomingEdgesOf(current)) { - if (currentGroup.contains(dependencyGraph.getEdgeSource(edge))) { - dependency = true; - break; - } - } + return ret; + } + + /** + * If the actuator depends on any in the currentGroup, empty the group and increment the order; + * otherwise, add it to the group and return the same order. + * + * @param executionOrder + * @param current + * @param dependencyGraph + * @param currentGroup + * @return + */ + private int checkExecutionOrder( + int executionOrder, + Actuator current, + Graph dependencyGraph, + Set currentGroup) { + boolean dependency = false; + for (Actuator previous : currentGroup) { + for (var edge : dependencyGraph.incomingEdgesOf(current)) { + if (currentGroup.contains(dependencyGraph.getEdgeSource(edge))) { + dependency = true; + break; } - - if (dependency) { - currentGroup.clear(); - return executionOrder + 1; - } - - currentGroup.add(current); - - return executionOrder; + } } - - private Graph computeActuatorOrder(Actuator rootActuator) { - Graph dependencyGraph = new DefaultDirectedGraph<>(DefaultEdge.class); - Map cache = new HashMap<>(); - loadGraph(rootActuator, dependencyGraph, cache, this.contextualization); - // keep the actuators that do nothing so we can tag their observation as resolved - return dependencyGraph; + if (dependency) { + currentGroup.clear(); + return executionOrder + 1; } - - private void loadGraph(Actuator rootActuator, Graph dependencyGraph, Map cache, KnowledgeGraph.Operation contextualization) { - - var childContextualization = contextualization.createChild(rootActuator, - "Contextualization of " + rootActuator, Activity.Type.CONTEXTUALIZATION); - operations.put(rootActuator, childContextualization); - - cache.put(rootActuator.getId(), rootActuator); - dependencyGraph.addVertex(rootActuator); - for (Actuator child : rootActuator.getChildren()) { - if (child.getActuatorType() == Actuator.Type.REFERENCE) { - dependencyGraph.addEdge(cache.get(child.getId()), rootActuator); - } else { - loadGraph(child, dependencyGraph, cache, childContextualization); - dependencyGraph.addEdge(child, rootActuator); - } - } + currentGroup.add(current); + + return executionOrder; + } + + private Graph computeActuatorOrder(Actuator rootActuator) { + Graph dependencyGraph = new 
DefaultDirectedGraph<>(DefaultEdge.class); + Map cache = new HashMap<>(); + loadGraph(rootActuator, dependencyGraph, cache, this.contextualization); + // keep the actuators that do nothing so we can tag their observation as resolved + return dependencyGraph; + } + + private void loadGraph( + Actuator rootActuator, + Graph dependencyGraph, + Map cache, + KnowledgeGraph.Operation contextualization) { + + var childContextualization = + contextualization.createChild( + rootActuator, "Contextualization of " + rootActuator, Activity.Type.CONTEXTUALIZATION); + operations.put(rootActuator, childContextualization); + + cache.put(rootActuator.getId(), rootActuator); + dependencyGraph.addVertex(rootActuator); + for (Actuator child : rootActuator.getChildren()) { + if (child.getActuatorType() == Actuator.Type.REFERENCE) { + dependencyGraph.addEdge(cache.get(child.getId()), rootActuator); + } else { + loadGraph(child, dependencyGraph, cache, childContextualization); + dependencyGraph.addEdge(child, rootActuator); + } } + } - public Throwable getCause() { - return cause; - } + public Throwable getCause() { + return cause; + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/RuntimeService.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/RuntimeService.java index 0181a59c3..016852f2a 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/RuntimeService.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/RuntimeService.java @@ -1,8 +1,15 @@ package org.integratedmodelling.klab.services.runtime; +import java.io.File; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.qpid.server.SystemLauncher; import org.integratedmodelling.common.authentication.scope.AbstractServiceDelegatingScope; -import org.integratedmodelling.common.knowledge.KnowledgeRepository; import org.integratedmodelling.common.logging.Logging; import org.integratedmodelling.common.runtime.DataflowImpl; import org.integratedmodelling.common.services.RuntimeCapabilitiesImpl; @@ -45,356 +52,386 @@ import org.integratedmodelling.klab.services.scopes.messaging.EmbeddedBroker; import org.integratedmodelling.klab.utilities.Utils; -import java.io.File; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicBoolean; - -public class RuntimeService extends BaseService implements org.integratedmodelling.klab.api.services.RuntimeService, org.integratedmodelling.klab.api.services.RuntimeService.Admin { - - private String hardwareSignature = - org.integratedmodelling.common.utils.Utils.Strings.hash(Utils.OS.getMACAddress()); - private RuntimeConfiguration configuration; - private KnowledgeGraph knowledgeGraph; - private SystemLauncher systemLauncher; - - public RuntimeService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { - super(scope, Type.RUNTIME, options); - ServiceConfiguration.INSTANCE.setMainService(this); - readConfiguration(options); - initializeMessaging(); - } - - private void initializeMessaging() { - if (this.configuration.getBrokerURI() == null) { - this.embeddedBroker = new 
EmbeddedBroker(); - } +public class RuntimeService extends BaseService + implements org.integratedmodelling.klab.api.services.RuntimeService, + org.integratedmodelling.klab.api.services.RuntimeService.Admin { + + private String hardwareSignature = + org.integratedmodelling.common.utils.Utils.Strings.hash(Utils.OS.getMACAddress()); + private RuntimeConfiguration configuration; + private KnowledgeGraph knowledgeGraph; + private SystemLauncher systemLauncher; + + public RuntimeService(AbstractServiceDelegatingScope scope, ServiceStartupOptions options) { + super(scope, Type.RUNTIME, options); + ServiceConfiguration.INSTANCE.setMainService(this); + readConfiguration(options); + initializeMessaging(); + } + + private void initializeMessaging() { + if (this.configuration.getBrokerURI() == null) { + this.embeddedBroker = new EmbeddedBroker(); } - - private void readConfiguration(ServiceStartupOptions options) { - File config = BaseService.getFileInConfigurationDirectory(options, "runtime.yaml"); - if (config.exists() && config.length() > 0 && !options.isClean()) { - this.configuration = Utils.YAML.load(config, RuntimeConfiguration.class); - } else { - // make an empty config - this.configuration = new RuntimeConfiguration(); - this.configuration.setServiceId(UUID.randomUUID().toString()); - saveConfiguration(); - } + } + + private void readConfiguration(ServiceStartupOptions options) { + File config = BaseService.getFileInConfigurationDirectory(options, "runtime.yaml"); + if (config.exists() && config.length() > 0 && !options.isClean()) { + this.configuration = Utils.YAML.load(config, RuntimeConfiguration.class); + } else { + // make an empty config + this.configuration = new RuntimeConfiguration(); + this.configuration.setServiceId(UUID.randomUUID().toString()); + saveConfiguration(); } - - private boolean createMainKnowledgeGraph() { - // TODO choose the DB from configuration - client or embedded server - var path = BaseService.getConfigurationSubdirectory(startupOptions, "dt").toPath(); - this.knowledgeGraph = new KnowledgeGraphNeo4JEmbedded(path); - return this.knowledgeGraph.isOnline(); + } + + private boolean createMainKnowledgeGraph() { + // TODO choose the DB from configuration - client or embedded server + var path = BaseService.getConfigurationSubdirectory(startupOptions, "dt").toPath(); + this.knowledgeGraph = new KnowledgeGraphNeo4JEmbedded(path); + return this.knowledgeGraph.isOnline(); + } + + public KnowledgeGraph getMainKnowledgeGraph() { + return this.knowledgeGraph; + } + + private void saveConfiguration() { + File config = BaseService.getFileInConfigurationDirectory(startupOptions, "runtime.yaml"); + org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + } + + @Override + public boolean scopesAreReactive() { + return true; + } + + @Override + public void initializeService() { + + Logging.INSTANCE.setSystemIdentifier("Runtime service: "); + + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceInitializing, + capabilities(serviceScope()).toString()); + + if (createMainKnowledgeGraph()) { + + // TODO internal libraries + getComponentRegistry().loadExtensions("org.integratedmodelling.klab.runtime"); + getComponentRegistry() + .initializeComponents( + BaseService.getConfigurationSubdirectory(startupOptions, "components")); + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceAvailable, + capabilities(serviceScope())); + } else { + + serviceScope() + .send( + 
Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceUnavailable, + capabilities(serviceScope())); } - - public KnowledgeGraph getMainKnowledgeGraph() { - return this.knowledgeGraph; + } + + @Override + public boolean operationalizeService() { + // nothing to do here + return true; + } + + @Override + public boolean shutdown() { + + /** Close every scope that's scheduled for closing at service shutdown */ + for (var scope : getScopeManager().getScopes(Scope.Type.CONTEXT, ContextScope.class)) { + if (scope instanceof ServiceContextScope serviceContextScope + && serviceContextScope.getPersistence() == Persistence.SERVICE_SHUTDOWN) { + scope.send( + Message.MessageClass.SessionLifecycle, + Message.MessageType.ContextClosed, + scope.getId()); + scope.close(); + Logging.INSTANCE.info("Context " + scope.getId() + " closed upon service shutdown"); + } } - private void saveConfiguration() { - File config = BaseService.getFileInConfigurationDirectory(startupOptions, "runtime.yaml"); - org.integratedmodelling.common.utils.Utils.YAML.save(this.configuration, config); + serviceScope() + .send( + Message.MessageClass.ServiceLifecycle, + Message.MessageType.ServiceUnavailable, + capabilities(serviceScope())); + if (systemLauncher != null) { + systemLauncher.shutdown(); } - - @Override - public boolean scopesAreReactive() { - return true; + if (knowledgeGraph != null) { + knowledgeGraph.shutdown(); } - - @Override - public void initializeService() { - - Logging.INSTANCE.setSystemIdentifier("Runtime service: "); - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceInitializing, - capabilities(serviceScope()).toString()); - - if (createMainKnowledgeGraph()) { - - // TODO internal libraries - getComponentRegistry().loadExtensions("org.integratedmodelling.klab.runtime"); - getComponentRegistry().initializeComponents(BaseService.getConfigurationSubdirectory(startupOptions, "components")); - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceAvailable - , capabilities(serviceScope())); - } else { - - serviceScope().send(Message.MessageClass.ServiceLifecycle, - Message.MessageType.ServiceUnavailable, capabilities(serviceScope())); - + return super.shutdown(); + } + + @Override + public Capabilities capabilities(Scope scope) { + + var ret = new RuntimeCapabilitiesImpl(); + ret.setLocalName(localName); + ret.setType(Type.RUNTIME); + ret.setUrl(getUrl()); + ret.setServerId(hardwareSignature == null ? null : ("RUNTIME_" + hardwareSignature)); + ret.setServiceId(configuration.getServiceId()); + ret.setServiceName("Runtime"); + ret.setBrokerURI( + embeddedBroker != null ? embeddedBroker.getURI() : configuration.getBrokerURI()); + ret.setBrokerURI( + embeddedBroker != null ? 
embeddedBroker.getURI() : configuration.getBrokerURI()); + ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); + ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); + + return ret; + } + + public String serviceId() { + return configuration.getServiceId(); + } + + @Override + public Map getExceptionTestcases(Scope scope, boolean deleteExisting) { + Map ret = new HashMap<>(); + return ret; + } + + @Override + public String registerSession(SessionScope sessionScope) { + if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { + + serviceSessionScope.setId(Utils.Names.shortUUID()); + getScopeManager() + .registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); + + if (serviceSessionScope.getServices(RuntimeService.class).isEmpty()) { + // add self as the runtime service, which is needed by the slave scopes + serviceSessionScope.getServices(RuntimeService.class).add(this); + } + + // all other services need to know the session we created + var fail = new AtomicBoolean(false); + for (var serviceClass : List.of(Resolver.class, Reasoner.class, ResourcesService.class)) { + try { + Thread.ofVirtual() + .start( + () -> { + for (var service : serviceSessionScope.getServices(serviceClass)) { + // if things are OK, the service repeats the ID back + if (!serviceSessionScope + .getId() + .equals(service.registerSession(serviceSessionScope))) { + fail.set(true); + } + } + }) + .join(); + } catch (InterruptedException e) { + fail.set(true); } + } - } + if (fail.get()) { + serviceSessionScope.send( + Notification.error( + "Error registering session with other services:" + " session is inoperative", + UI.Interactivity.DISPLAY)); + serviceSessionScope.setOperative(false); + } - @Override - public boolean operationalizeService() { - // nothing to do here - return true; + return serviceSessionScope.getId(); } - - @Override - public boolean shutdown() { - - /** - * Close every scope that's scheduled for closing at service shutdown - */ - for (var scope : getScopeManager().getScopes(Scope.Type.CONTEXT, ContextScope.class)) { - if (scope instanceof ServiceContextScope serviceContextScope && serviceContextScope.getPersistence() == Persistence.SERVICE_SHUTDOWN) { - scope.send(Message.MessageClass.SessionLifecycle, Message.MessageType.ContextClosed, - scope.getId()); - scope.close(); - Logging.INSTANCE.info("Context " + scope.getId() + " closed upon service shutdown"); - } - } - - serviceScope().send(Message.MessageClass.ServiceLifecycle, Message.MessageType.ServiceUnavailable, - capabilities(serviceScope())); - if (systemLauncher != null) { - systemLauncher.shutdown(); - } - if (knowledgeGraph != null) { - knowledgeGraph.shutdown(); + throw new KlabIllegalArgumentException("unexpected scope class"); + } + + @Override + public String registerContext(ContextScope contextScope) { + + if (contextScope instanceof ServiceContextScope serviceContextScope) { + + serviceContextScope.setId( + serviceContextScope.getParentScope().getId() + "." 
+ Utils.Names.shortUUID()); + getScopeManager() + .registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); + serviceContextScope.setDigitalTwin( + new DigitalTwinImpl(this, contextScope, getMainKnowledgeGraph())); + + if (serviceContextScope.getServices(RuntimeService.class).isEmpty()) { + // add self as the runtime service, which is needed by the slave scopes + serviceContextScope.getServices(RuntimeService.class).add(this); + } + + // all other services need to know the context we created. TODO we may also need to + // register with the stats services and maybe any independent authorities + var fail = new AtomicBoolean(false); + for (var serviceClass : List.of(Resolver.class, Reasoner.class, ResourcesService.class)) { + try { + Thread.ofVirtual() + .start( + () -> { + for (var service : serviceContextScope.getServices(serviceClass)) { + // if things are OK, the service repeats the ID back + if (!serviceContextScope + .getId() + .equals(service.registerContext(serviceContextScope))) { + fail.set(true); + } + } + }) + .join(); + } catch (InterruptedException e) { + fail.set(true); } - return super.shutdown(); - } + } - @Override - public Capabilities capabilities(Scope scope) { - - var ret = new RuntimeCapabilitiesImpl(); - ret.setLocalName(localName); - ret.setType(Type.RUNTIME); - ret.setUrl(getUrl()); - ret.setServerId(hardwareSignature == null ? null : ("RUNTIME_" + hardwareSignature)); - ret.setServiceId(configuration.getServiceId()); - ret.setServiceName("Runtime"); - ret.setBrokerURI(embeddedBroker != null ? embeddedBroker.getURI() : configuration.getBrokerURI()); - ret.setBrokerURI(embeddedBroker != null ? embeddedBroker.getURI() : configuration.getBrokerURI()); - ret.getExportSchemata().putAll(ResourceTransport.INSTANCE.getExportSchemata()); - ret.getImportSchemata().putAll(ResourceTransport.INSTANCE.getImportSchemata()); + if (fail.get()) { + serviceContextScope.send( + Notification.error( + "Error registering context with other services:" + " context is inoperative", + UI.Interactivity.DISPLAY)); + serviceContextScope.setOperative(false); + } - return ret; + return serviceContextScope.getId(); } + throw new KlabIllegalArgumentException("unexpected scope class"); + } - public String serviceId() { - return configuration.getServiceId(); - } + @Override + public long submit(Observation observation, ContextScope scope) { - @Override - public Map getExceptionTestcases(Scope scope, boolean deleteExisting) { - Map ret = new HashMap<>(); - return ret; + if (observation.isResolved()) { + // TODO there may be a context for this at some point. 
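registerSession() and registerContext() above share the same handshake: each peer service is asked to echo the scope id back on a virtual thread, and any mismatch or interruption flips an AtomicBoolean that marks the scope inoperative. A self-contained sketch of that pattern, with a Function standing in for the remote registration call:

import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;

class RegistrationHandshakeSketch {

  // Each peer must repeat the scope id back; a mismatch or an interruption marks failure.
  static boolean registerWithPeers(String scopeId, List<Function<String, String>> peers) {
    AtomicBoolean fail = new AtomicBoolean(false);
    for (Function<String, String> peer : peers) {
      try {
        Thread.ofVirtual()
            .start(
                () -> {
                  if (!scopeId.equals(peer.apply(scopeId))) {
                    fail.set(true);
                  }
                })
            .join();
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        fail.set(true);
      }
    }
    return !fail.get();
  }

  public static void main(String[] args) {
    System.out.println(registerWithPeers("ctx.1", List.of(id -> id))); // true
    System.out.println(registerWithPeers("ctx.1", List.of(id -> "other"))); // false
  }
}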
+ throw new KlabIllegalStateException( + "A resolved observation cannot be submitted to the " + "knowledge graph for now"); } - @Override - public String registerSession(SessionScope sessionScope) { - if (sessionScope instanceof ServiceSessionScope serviceSessionScope) { - - serviceSessionScope.setId(Utils.Names.shortUUID()); - getScopeManager().registerScope(serviceSessionScope, capabilities(sessionScope).getBrokerURI()); - - if (serviceSessionScope.getServices(RuntimeService.class).isEmpty()) { - // add self as the runtime service, which is needed by the slave scopes - serviceSessionScope.getServices(RuntimeService.class).add(this); - } - - // all other services need to know the session we created - var fail = new AtomicBoolean(false); - for (var serviceClass : List.of(Resolver.class, Reasoner.class, ResourcesService.class)) { - try { - Thread.ofVirtual().start(() -> { - for (var service : serviceSessionScope.getServices(serviceClass)) { - // if things are OK, the service repeats the ID back - if (!serviceSessionScope.getId().equals( - service.registerSession(serviceSessionScope))) { - fail.set(true); - } - } - }).join(); - } catch (InterruptedException e) { - fail.set(true); - } - } - - if (fail.get()) { - serviceSessionScope.send(Notification.error( - "Error registering session with other services:" + - " session is inoperative", - UI.Interactivity.DISPLAY)); - serviceSessionScope.setOperative(false); - } - - return serviceSessionScope.getId(); - } - throw new KlabIllegalArgumentException("unexpected scope class"); + if (observation.getObservable().is(SemanticType.QUALITY) + && scope.getContextObservation() == null) { + throw new KlabIllegalStateException("Cannot observe a quality without a context observation"); } - @Override - public String registerContext(ContextScope contextScope) { - - if (contextScope instanceof ServiceContextScope serviceContextScope) { - - serviceContextScope.setId( - serviceContextScope.getParentScope().getId() + "." + Utils.Names.shortUUID()); - getScopeManager().registerScope(serviceContextScope, capabilities(contextScope).getBrokerURI()); - serviceContextScope.setDigitalTwin(new DigitalTwinImpl(this, contextScope, - getMainKnowledgeGraph())); - - if (serviceContextScope.getServices(RuntimeService.class).isEmpty()) { - // add self as the runtime service, which is needed by the slave scopes - serviceContextScope.getServices(RuntimeService.class).add(this); - } - - // all other services need to know the context we created. 
TODO we may also need to - // register with the stats services and maybe any independent authorities - var fail = new AtomicBoolean(false); - for (var serviceClass : List.of(Resolver.class, Reasoner.class, ResourcesService.class)) { - try { - Thread.ofVirtual().start(() -> { - for (var service : serviceContextScope.getServices(serviceClass)) { - // if things are OK, the service repeats the ID back - if (!serviceContextScope.getId().equals( - service.registerContext(serviceContextScope))) { - fail.set(true); - } - } - }).join(); - } catch (InterruptedException e) { - fail.set(true); - } - } - - if (fail.get()) { - serviceContextScope.send(Notification.error( - "Error registering context with other services:" + - " context is inoperative", - UI.Interactivity.DISPLAY)); - serviceContextScope.setOperative(false); - } - - return serviceContextScope.getId(); - - } - throw new KlabIllegalArgumentException("unexpected scope class"); + /** Only situation when we accept an observation w/o geometry */ + if (observation.getGeometry() == null && observation instanceof ObservationImpl observation1) { + if (observation.getObservable().is(SemanticType.QUALITY) + && scope.getContextObservation() != null) { + observation1.setGeometry(scope.getContextObservation().getGeometry()); + } else if (observation.getObservable().is(SemanticType.COUNTABLE) + && observation.getObservable().isCollective() + && scope.getObserver() != null) { + observation1.setGeometry(scope.getObservedGeometry()); + } } - @Override - public long submit(Observation observation, ContextScope scope) { - - if (observation.isResolved()) { - // TODO there may be a context for this at some point. - throw new KlabIllegalStateException("A resolved observation cannot be submitted to the " + - "knowledge graph for now"); + if (scope instanceof ServiceContextScope serviceContextScope) { + + var digitalTwin = getDigitalTwin(scope); + var parentActivity = Provenance.getActivity(scope); + var agent = getAgent(scope); + + /* + * The initial activity should be in the scope; if not, we're observing at the + * root DT level and we get the context initialization activity as parent. 
+ */ + var instantiation = + digitalTwin + .knowledgeGraph() + .operation( + agent, + parentActivity, + Activity.Type.INSTANTIATION, + "Instantiation of " + observation, + observation, + this); + + try (instantiation) { + + var ret = instantiation.store(observation); + instantiation.link( + instantiation.getActivity(), observation, DigitalTwin.Relationship.CREATED); + if (scope.getContextObservation() != null) { + instantiation.link( + scope.getContextObservation(), observation, DigitalTwin.Relationship.HAS_CHILD); + } else { + instantiation.linkToRootNode(observation, DigitalTwin.Relationship.HAS_CHILD); } - if (observation.getObservable().is(SemanticType.QUALITY) && scope.getContextObservation() == null) { - throw new KlabIllegalStateException("Cannot observe a quality without a context observation"); + if (scope.getObserver() != null) { + instantiation.link( + observation, scope.getObserver(), DigitalTwin.Relationship.HAS_OBSERVER); } - /** - * Only situation when we accept an observation w/o geometry - */ - if (observation.getGeometry() == null && - observation instanceof ObservationImpl observation1) { - if (observation.getObservable().is(SemanticType.QUALITY) && scope.getContextObservation() != null) { - observation1.setGeometry(scope.getContextObservation().getGeometry()); - } else if (observation.getObservable().is(SemanticType.COUNTABLE) && observation.getObservable().isCollective() && scope.getObserver() != null) { - observation1.setGeometry(scope.getObservedGeometry()); - } - } + instantiation.success(scope, observation); - if (scope instanceof ServiceContextScope serviceContextScope) { - - var digitalTwin = getDigitalTwin(scope); - var parentActivity = Provenance.getActivity(scope); - var agent = getAgent(scope); - - /* - * The initial activity should be in the scope; if not, we're observing at the - * root DT level and we get the context initialization activity as parent. 
- */ - var instantiation = digitalTwin.knowledgeGraph().operation(agent, parentActivity, - Activity.Type.INSTANTIATION, - "Instantiation of " + observation, observation, this); - - try (instantiation) { - - var ret = instantiation.store(observation); - instantiation.link(instantiation.getActivity(), observation, - DigitalTwin.Relationship.CREATED); - if (scope.getContextObservation() != null) { - instantiation.link(scope.getContextObservation(), observation, - DigitalTwin.Relationship.HAS_CHILD); - } else { - instantiation.linkToRootNode(observation, DigitalTwin.Relationship.HAS_CHILD); - } + return ret; - if (scope.getObserver() != null) { - instantiation.link(observation, scope.getObserver(), - DigitalTwin.Relationship.HAS_OBSERVER); - } + } catch (Throwable t) { + instantiation.fail(scope, observation, t); + } + } - instantiation.success(scope, observation); + return Observation.UNASSIGNED_ID; + } - return ret; + private Agent getAgent(ContextScope scope) { - } catch (Throwable t) { - instantiation.fail(scope, observation, t); - } - } - - return Observation.UNASSIGNED_ID; + var ret = Provenance.getAgent(scope); + if (ret != null) { + return ret; } - - private Agent getAgent(ContextScope scope) { - - var ret = Provenance.getAgent(scope); - if (ret != null) { - return ret; - } - if (scope instanceof ServiceContextScope serviceContextScope) { - // assume the user is the agent - return serviceContextScope.getDigitalTwin().knowledgeGraph().user(); - } - throw new KlabIllegalStateException("Cannot determine the requesting agent from scope"); + if (scope instanceof ServiceContextScope serviceContextScope) { + // assume the user is the agent + return serviceContextScope.getDigitalTwin().knowledgeGraph().user(); } + throw new KlabIllegalStateException("Cannot determine the requesting agent from scope"); + } - private Activity getInitializationActivity(Observation observation, ContextScope scope) { - var ret = Provenance.getActivity(scope); - if (ret != null) { - return ret; - } - var activities = getDigitalTwin(scope).knowledgeGraph().get(scope, Activity.class, - Activity.Type.INITIALIZATION); - if (activities.size() == 1) { - return activities.getFirst(); - } - throw new KlabInternalErrorException("cannot locate the context initialization activity"); + private Activity getInitializationActivity(Observation observation, ContextScope scope) { + var ret = Provenance.getActivity(scope); + if (ret != null) { + return ret; } + var activities = + getDigitalTwin(scope) + .knowledgeGraph() + .get(scope, Activity.class, Activity.Type.INITIALIZATION); + if (activities.size() == 1) { + return activities.getFirst(); + } + throw new KlabInternalErrorException("cannot locate the context initialization activity"); + } - @Override - public Future resolve(long id, ContextScope scope) { - - if (scope instanceof ServiceContextScope serviceContextScope) { + @Override + public Future resolve(long id, ContextScope scope) { - var resolver = serviceContextScope.getService(Resolver.class); - var observation = serviceContextScope.getObservation(id); - var digitalTwin = getDigitalTwin(scope); - var parentActivities = digitalTwin.knowledgeGraph().get(scope, Activity.class, - Activity.Type.INSTANTIATION, observation); + if (scope instanceof ServiceContextScope serviceContextScope) { - // TODO check - var parentActivity = parentActivities.getFirst(); - final var ret = new CompletableFuture(); + var resolver = serviceContextScope.getService(Resolver.class); + var observation = serviceContextScope.getObservation(id); + var 
digitalTwin = getDigitalTwin(scope); + var parentActivities = + digitalTwin + .knowledgeGraph() + .get(scope, Activity.class, Activity.Type.INSTANTIATION, observation); - Thread.ofVirtual().start(() -> { + // TODO check + var parentActivity = parentActivities.getFirst(); + final var ret = new CompletableFuture(); + Thread.ofVirtual() + .start( + () -> { Dataflow dataflow = null; Activity resolutionActivity = null; Observation result = null; @@ -402,210 +439,240 @@ public Future resolve(long id, ContextScope scope) { /* This will commit or rollback at close() */ - var resolution = digitalTwin.knowledgeGraph().operation(digitalTwin.knowledgeGraph().klab() - , parentActivity, Activity.Type.RESOLUTION, - "Resolution of " + observation, resolver); + var resolution = + digitalTwin + .knowledgeGraph() + .operation( + digitalTwin.knowledgeGraph().klab(), + parentActivity, + Activity.Type.RESOLUTION, + "Resolution of " + observation, + resolver); try (resolution) { - result = observation; - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ResolutionStarted, result); - try { - // TODO send out the activity with the scope - dataflow = resolver.resolve(observation, scope); - if (dataflow != null) { - resolution.success(scope, result, dataflow, - "Resolution of observation _" + observation.getUrn() + "_ of **" + observation.getObservable().getUrn() + "**", resolver); - scope.send(Message.MessageClass.ObservationLifecycle, - dataflow.isEmpty() ? Message.MessageType.ResolutionUnsuccessful : - Message.MessageType.ResolutionSuccessful, result); - resolutionActivity = resolution.getActivity(); - } else { - resolution.fail(scope, observation); - ret.completeExceptionally(new KlabResourceAccessException("Resolution of " + observation.getUrn() + " failed")); - } - } catch (Throwable t) { - Logging.INSTANCE.error(t); - ret.completeExceptionally(t); - resolution.fail(scope, observation, t); - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ResolutionAborted, observation); + result = observation; + scope.send( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionStarted, + result); + try { + // TODO send out the activity with the scope + dataflow = resolver.resolve(observation, scope); + if (dataflow != null) { + resolution.success( + scope, + result, + dataflow, + "Resolution of observation _" + + observation.getUrn() + + "_ of **" + + observation.getObservable().getUrn() + + "**", + resolver); + scope.send( + Message.MessageClass.ObservationLifecycle, + dataflow.isEmpty() + ? 
Message.MessageType.ResolutionUnsuccessful + : Message.MessageType.ResolutionSuccessful, + result); + resolutionActivity = resolution.getActivity(); + } else { + resolution.fail(scope, observation); + ret.completeExceptionally( + new KlabResourceAccessException( + "Resolution of " + observation.getUrn() + " failed")); } - } catch (Throwable t) { + } catch (Throwable t) { Logging.INSTANCE.error(t); - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ResolutionAborted, observation); - resolution.fail(scope, observation, t); ret.completeExceptionally(t); + resolution.fail(scope, observation, t); + scope.send( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionAborted, + observation); + } + } catch (Throwable t) { + Logging.INSTANCE.error(t); + scope.send( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ResolutionAborted, + observation); + resolution.fail(scope, observation, t); + ret.completeExceptionally(t); } if (!ret.isCompletedExceptionally() && dataflow != null && !dataflow.isEmpty()) { - /* - this will commit all resources at close() - */ - var contextualization = - digitalTwin.knowledgeGraph().operation(digitalTwin.knowledgeGraph().klab(), - resolutionActivity, Activity.Type.EXECUTION, - "Execution of resolved dataflow to contextualize " + observation, - dataflow, - this); - - try (contextualization) { - // TODO contextualization gets its own activities to use in operations - // (dependent on resolution) linked to actuators by runDataflow - result = runDataflow(dataflow, scope, contextualization); - ret.complete(result); - contextualization.success(scope, dataflow, result); - } catch (Throwable t) { - Logging.INSTANCE.error(t); - contextualization.fail(scope, dataflow, result, t); - ret.completeExceptionally(t); - } + /* + this will commit all resources at close() + */ + var contextualization = + digitalTwin + .knowledgeGraph() + .operation( + digitalTwin.knowledgeGraph().klab(), + resolutionActivity, + Activity.Type.EXECUTION, + "Execution of resolved dataflow to contextualize " + observation, + dataflow, + this); + + try (contextualization) { + // TODO contextualization gets its own activities to use in operations + // (dependent on resolution) linked to actuators by runDataflow + result = runDataflow(dataflow, scope, contextualization); + ret.complete(result); + contextualization.success(scope, dataflow, result); + } catch (Throwable t) { + Logging.INSTANCE.error(t); + contextualization.fail(scope, dataflow, result, t); + ret.completeExceptionally(t); + } } - }); - - return ret; - } - - throw new KlabInternalErrorException("Digital twin is inaccessible because of unexpected scope " + - "implementation"); - } + }); - @Override - public Observation runDataflow(Dataflow dataflow, ContextScope contextScope) { - // TODO fill in the operation representing an external dataflow run - return runDataflow(dataflow, contextScope, null); + return ret; } - public Observation runDataflow(Dataflow dataflow, ContextScope contextScope, - KnowledgeGraph.Operation contextualization) { - - /* - Load or confirm availability of all needed resources and create any non-existing observations - */ - - - /* - find contextualization scale and hook point into the DT from the scope - */ - - - if (contextScope instanceof ServiceContextScope serviceContextScope) { - /** - * Run each actuator set in order - */ - for (var rootActuator : dataflow.getComputation()) { - var executionSequence = new ExecutionSequence(contextualization, dataflow, - 
getComponentRegistry(), serviceContextScope); - executionSequence.compile(rootActuator); - if (!executionSequence.isEmpty()) { - if (!executionSequence.run()) { - contextualization.fail(contextScope, dataflow.getTarget(), - executionSequence.getCause()); - return Observation.empty(); - } - } - } - - /* - intersect coverage from dataflow with contextualization scale - */ - - if (dataflow instanceof DataflowImpl df && dataflow.getTarget() instanceof ObservationImpl obs) { - obs.setResolved(true); - obs.setResolvedCoverage(df.getResolvedCoverage()); - } - - contextualization.success(contextScope, dataflow.getTarget(), dataflow); - + throw new KlabInternalErrorException( + "Digital twin is inaccessible because of unexpected scope " + "implementation"); + } + + @Override + public Observation runDataflow(Dataflow dataflow, ContextScope contextScope) { + // TODO fill in the operation representing an external dataflow run + return runDataflow(dataflow, contextScope, null); + } + + public Observation runDataflow( + Dataflow dataflow, + ContextScope contextScope, + KnowledgeGraph.Operation contextualization) { + + /* + Load or confirm availability of all needed resources and create any non-existing observations + */ + + /* + find contextualization scale and hook point into the DT from the scope + */ + + if (contextScope instanceof ServiceContextScope serviceContextScope) { + /** Run each actuator set in order */ + for (var rootActuator : dataflow.getComputation()) { + var executionSequence = + new ExecutionSequence( + contextualization, dataflow, getComponentRegistry(), serviceContextScope); + executionSequence.compile(rootActuator); + if (!executionSequence.isEmpty()) { + if (!executionSequence.run()) { + contextualization.fail( + contextScope, dataflow.getTarget(), executionSequence.getCause()); + return Observation.empty(); + } } + } - return dataflow.getTarget(); - } + /* + intersect coverage from dataflow with contextualization scale + */ - private DigitalTwin getDigitalTwin(ContextScope contextScope) { - if (contextScope instanceof ServiceContextScope serviceContextScope) { - return serviceContextScope.getDigitalTwin(); - } - throw new KlabInternalErrorException("Digital twin is inaccessible because of unexpected scope " + - "implementation"); - } + if (dataflow instanceof DataflowImpl df + && dataflow.getTarget() instanceof ObservationImpl obs) { + obs.setResolved(true); + obs.setResolvedCoverage(df.getResolvedCoverage()); + } - @Override - public List retrieveAssets(ContextScope contextScope, Class assetClass, - Object... 
queryParameters) { - return knowledgeGraph.get(contextScope, assetClass, queryParameters); + contextualization.success(contextScope, dataflow.getTarget(), dataflow); } - @Override - public ResourceSet resolveContextualizables(List contextualizables, - ContextScope scope) { - - ResourceSet ret = new ResourceSet(); - // TODO FIXME USE ALL SERVICES - var resourcesService = scope.getService(ResourcesService.class); - /** - * These are the contextualizables that need resolution at the runtime side, the others come with - * their definition and are directly inserted in the dataflow - */ - for (var contextualizable : contextualizables) { - if (contextualizable.getServiceCall() != null) { - var resolution = - resourcesService.resolveServiceCall(contextualizable.getServiceCall().getUrn(), - contextualizable.getServiceCall().getRequiredVersion(), - scope); - if (resolution.isEmpty()) { - return resolution; - } - - // HERE we should use the knowledge repository, which needs to be specialized to hold - // components. OR the ingest(resourceset, scope) should - // be in the SERVICE and use the KR as needed. Load plugins, resource->adapters, service - // calls, projects, models etc. according to the needs of the - // service. - if (!ingestResources(resolution, scope)) { - return ResourceSet.empty(Notification.error("Cannot receive resources from " + resourcesService.getServiceName())); - } - ret = Utils.Resources.merge(ret, resolution); - } else if (contextualizable.getResourceUrn() != null) { - // TODO ensure resource or adapter is accessible - // var resolution = resourcesService.resolveRe(contextualizable - // .getServiceCall().getUrn(), scope); - // if (resolution.isEmpty()) { - // return resolution; - // } - } - } - - return ret; - } + return dataflow.getTarget(); + } - @Override - public List getSessionInfo(Scope scope) { - return knowledgeGraph.getSessionInfo(scope); + private DigitalTwin getDigitalTwin(ContextScope contextScope) { + if (contextScope instanceof ServiceContextScope serviceContextScope) { + return serviceContextScope.getDigitalTwin(); } - - @Override - public boolean releaseSession(SessionScope scope) { - try { - scope.close(); - return true; - } catch (Throwable t) { - // + throw new KlabInternalErrorException( + "Digital twin is inaccessible because of unexpected scope " + "implementation"); + } + + @Override + public List retrieveAssets( + ContextScope contextScope, Class assetClass, Object... queryParameters) { + return knowledgeGraph.get(contextScope, assetClass, queryParameters); + } + + @Override + public ResourceSet resolveContextualizables( + List contextualizables, ContextScope scope) { + + ResourceSet ret = new ResourceSet(); + // TODO FIXME USE ALL SERVICES + var resourcesService = scope.getService(ResourcesService.class); + /** + * These are the contextualizables that need resolution at the runtime side, the others come + * with their definition and are directly inserted in the dataflow + */ + for (var contextualizable : contextualizables) { + if (contextualizable.getServiceCall() != null) { + var resolution = + resourcesService.resolveServiceCall( + contextualizable.getServiceCall().getUrn(), + contextualizable.getServiceCall().getRequiredVersion(), + scope); + if (resolution.isEmpty()) { + return resolution; } - return false; - } - @Override - public boolean releaseContext(ContextScope scope) { - try { - scope.close(); - return true; - } catch (Throwable t) { - // + // HERE we should use the knowledge repository, which needs to be specialized to hold + // components. 
OR the ingest(resourceset, scope) should + // be in the SERVICE and use the KR as needed. Load plugins, resource->adapters, service + // calls, projects, models etc. according to the needs of the + // service. + if (!ingestResources(resolution, scope)) { + return ResourceSet.empty( + Notification.error( + "Cannot receive resources from " + resourcesService.getServiceName())); } - return false; + ret = Utils.Resources.merge(ret, resolution); + } else if (contextualizable.getResourceUrn() != null) { + // TODO ensure resource or adapter is accessible + // var resolution = resourcesService.resolveRe(contextualizable + // .getServiceCall().getUrn(), scope); + // if (resolution.isEmpty()) { + // return resolution; + // } + } } + return ret; + } + + @Override + public List getSessionInfo(Scope scope) { + return knowledgeGraph.getSessionInfo(scope); + } + + @Override + public boolean releaseSession(SessionScope scope) { + try { + scope.close(); + return true; + } catch (Throwable t) { + // + } + return false; + } + + @Override + public boolean releaseContext(ContextScope scope) { + try { + scope.close(); + return true; + } catch (Throwable t) { + // + } + return false; + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ScalarMapper.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ScalarMapper.java index 85624fcc7..d754089c5 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ScalarMapper.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/ScalarMapper.java @@ -15,62 +15,69 @@ import org.integratedmodelling.klab.services.scopes.ServiceContextScope; /** - * Scalar executor providing handling and caching for LUTs, classifications and expression and implementing - * the mapping strategy configured in the runtime. + * Scalar executor providing handling and caching for LUTs, classifications and expression and + * implementing the mapping strategy configured in the runtime. 
*/ public class ScalarMapper { - private final DigitalTwin digitalTwin; - private final ServiceContextScope scope; - private final Observation targetObservation; - private final Class storageClass; - - public ScalarMapper(Observation target, DigitalTwin digitalTwin, ServiceContextScope scope) { - - this.targetObservation = target; - this.digitalTwin = digitalTwin; - this.scope = scope; - - // observation should admit scalar values - this.storageClass = switch (target.getObservable().getArtifactType()) { - case BOOLEAN -> BooleanStorage.class; - case NUMBER -> /* TODO use config to choose between double and float */ DoubleStorage.class; - case TEXT, CONCEPT -> KeyedStorage.class; - default -> - throw new KlabIllegalStateException("scalar mapping to type " + target.getObservable().getArtifactType() + " not supported"); + private final DigitalTwin digitalTwin; + private final ServiceContextScope scope; + private final Observation targetObservation; + private final Class storageClass; + + public ScalarMapper(Observation target, DigitalTwin digitalTwin, ServiceContextScope scope) { + + this.targetObservation = target; + this.digitalTwin = digitalTwin; + this.scope = scope; + + // observation should admit scalar values + this.storageClass = + switch (target.getObservable().getArtifactType()) { + case BOOLEAN -> BooleanStorage.class; + case NUMBER -> /* TODO use config to choose between double and float */ + DoubleStorage.class; + case TEXT, CONCEPT -> KeyedStorage.class; + default -> + throw new KlabIllegalStateException( + "scalar mapping to type " + + target.getObservable().getArtifactType() + + " not supported"); }; - } + } - public void add(ServiceCall serviceCall, ComponentRegistry.FunctionDescriptor descriptor) { + public void add(ServiceCall serviceCall, ComponentRegistry.FunctionDescriptor descriptor) { - // check out the expected data value vs. the observation + // check out the expected data value vs. 
the observation - // if needed, adjust the storage class + // if needed, adjust the storage class - System.out.println("ADD CALL " + serviceCall); - } - - public boolean run() { + System.out.println("ADD CALL " + serviceCall); + } - // determine storage - var storage = digitalTwin.stateStorage().getOrCreateStorage(targetObservation, storageClass); + public boolean run() { - // call storage.map() with the correct executor and configuration - // TODO masking - switch (storage) { - case DoubleStorage doubleStorage -> { doubleStorage.map(getDoubleMapper());} - case FloatStorage doubleStorage -> {} - case BooleanStorage booleanStorage -> {} - case KeyedStorage keyedStorage -> {} - default -> throw new KlabInternalErrorException("unexpected storage type in ScalarMapper run()"); - } + // determine storage + var storage = digitalTwin.stateStorage().getOrCreateStorage(targetObservation, storageClass); - return true; + // call storage.map() with the correct executor and configuration + // TODO masking + switch (storage) { + case DoubleStorage doubleStorage -> { + doubleStorage.map(getDoubleMapper()); + } + case FloatStorage doubleStorage -> {} + case BooleanStorage booleanStorage -> {} + case KeyedStorage keyedStorage -> {} + default -> + throw new KlabInternalErrorException("unexpected storage type in ScalarMapper run()"); } - private DoubleStorage.OffsetToDoubleFunction getDoubleMapper() { - // TODO - return null; - } + return true; + } + private DoubleStorage.OffsetToDoubleFunction getDoubleMapper() { + // TODO + return null; + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/digitaltwin/DigitalTwinImpl.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/digitaltwin/DigitalTwinImpl.java index d6bab9e22..fbfdfb08a 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/digitaltwin/DigitalTwinImpl.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/digitaltwin/DigitalTwinImpl.java @@ -15,46 +15,45 @@ public class DigitalTwinImpl implements DigitalTwin { - KnowledgeGraph knowledgeGraph; - StateStorage stateStorage; - ContextScope rootScope; - - public DigitalTwinImpl(RuntimeService service, ContextScope scope, KnowledgeGraph database) { - this.rootScope = scope; - this.knowledgeGraph = database.contextualize(scope); - this.stateStorage = new StateStorageImpl(service, scope); - } - - @Override - public KnowledgeGraph knowledgeGraph() { - return this.knowledgeGraph; - } - - @Override - public StateStorage stateStorage() { - return this.stateStorage; - } - - @Override - public boolean ingest(Data data, Observation target) { - // TODO - return false; - } - - @Override - public Provenance getProvenanceGraph(ContextScope context) { - return new ProvenanceGraph(this.knowledgeGraph, this.rootScope); - } - - @Override - public Dataflow getDataflowGraph(ContextScope context) { - return new DataflowGraph(this.knowledgeGraph, this.rootScope); - } - - @Override - public void dispose() { - this.knowledgeGraph.deleteContext(); - this.stateStorage.clear(); - } - + KnowledgeGraph knowledgeGraph; + StateStorage stateStorage; + ContextScope rootScope; + + public DigitalTwinImpl(RuntimeService service, ContextScope scope, KnowledgeGraph database) { + this.rootScope = scope; + this.knowledgeGraph = database.contextualize(scope); + this.stateStorage = new StateStorageImpl(service, scope); + } + + @Override + public KnowledgeGraph knowledgeGraph() { + return 
this.knowledgeGraph; + } + + @Override + public StateStorage stateStorage() { + return this.stateStorage; + } + + @Override + public boolean ingest(Data data, Observation target) { + // TODO + return false; + } + + @Override + public Provenance getProvenanceGraph(ContextScope context) { + return new ProvenanceGraph(this.knowledgeGraph, this.rootScope); + } + + @Override + public Dataflow getDataflowGraph(ContextScope context) { + return new DataflowGraph(this.knowledgeGraph, this.rootScope); + } + + @Override + public void dispose() { + this.knowledgeGraph.deleteContext(); + this.stateStorage.clear(); + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/AbstractKnowledgeGraph.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/AbstractKnowledgeGraph.java index cc9b19e34..71670b05d 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/AbstractKnowledgeGraph.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/AbstractKnowledgeGraph.java @@ -29,150 +29,161 @@ public abstract class AbstractKnowledgeGraph implements KnowledgeGraph { - protected static int MAX_CACHED_OBSERVATIONS = 400; - - protected ContextScope scope; - // protected LoadingCache assetCache = - // CacheBuilder.newBuilder().maximumSize(MAX_CACHED_OBSERVATIONS).build(new CacheLoader() { - // @Override - // public RuntimeAsset load(Long key) throws Exception { - // return retrieve(key, RuntimeAsset.class, scope); - // } - // }); - - /** - * Return a RuntimeAsset representing the overall dataflow related to the scope, so that it can be used - * for linking using the other CRUD methods. - * - * @return the dataflow root node, unique for the context. - * @throws org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException if the graph is not - * contextualized. - */ - protected abstract RuntimeAsset getDataflowNode(); - - protected abstract long nextKey(); - - /** - * Return a RuntimeAsset representing the overall provenance related to the scope, so that it can be used - * for linking using the other CRUD methods. - * - * @return the dataflow root node, unique for the context. - * @throws org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException if the graph is not - * contextualized. - */ - protected abstract RuntimeAsset getProvenanceNode(); - - /** - * Retrieve the asset with the passed key. - * - * @param key - * @param assetClass - * @param - * @return - */ - protected abstract T retrieve(Object key, Class assetClass, Scope scope); - - /** - * Store the passed asset, return its unique long ID. - * - * @param asset - * @param additionalProperties any pair of properties we want overridden. Pass pairs and do it right or - * you'll get an exception. - * @return - */ - protected abstract long store(RuntimeAsset asset, Scope scope, Object... additionalProperties); - - /** - * Link the two passed assets. - * - * @param source - * @param destination - * @param additionalProperties any pair of properties we want overridden. Pass pairs and do it right or - * you'll get an exception. - */ - protected abstract void link(RuntimeAsset source, RuntimeAsset destination, - DigitalTwin.Relationship relationship, Scope scope, - Object... additionalProperties); - - @Override - public T get(long id, Class resultClass) { - return retrieve(id, resultClass, scope); - } - - /** - * Define all properties for the passed asset. 
- * - * @param asset - * @param additionalParameters any pair of additional parameters to add - * @return - */ - protected Map asParameters(Object asset, Object... additionalParameters) { - Map ret = new HashMap<>(); - if (asset != null) { - switch (asset) { - case Observation observation -> { - - ret.putAll(observation.getMetadata()); - ret.put("name", observation.getName() == null ? observation.getObservable().codeName() - : observation.getName()); - ret.put("updated", observation.getLastUpdate()); - ret.put("resolved", observation.isResolved()); - ret.put("type", observation.getType().name()); - ret.put("urn", observation.getUrn()); - ret.put("semantictype", SemanticType.fundamentalType( - observation.getObservable().getSemantics().getType()).name()); - ret.put("semantics", observation.getObservable().getSemantics().getUrn()); - ret.put("observable", observation.getObservable().getUrn()); - ret.put("id", observation.getId()); - } - case Agent agent -> { - // TODO - } - case ActuatorImpl actuator -> { - - ret.put("observationId", actuator.getId()); - ret.put("id", actuator.getInternalId()); - StringBuilder code = new StringBuilder(); - for (var call : actuator.getComputation()) { - // TODO skip any recursive resolution calls and prepare for linking later - code.append(call.encode(Language.DEFAULT_EXPRESSION_LANGUAGE)).append("\n"); - } - ret.put("semantics", actuator.getObservable().getUrn()); - ret.put("computation", code.toString()); - ret.put("strategy", actuator.getStrategyUrn()); - } - case Activity activity -> { - ret.putAll(activity.getMetadata()); - ret.put("credits", activity.getCredits()); - ret.put("description", activity.getDescription()); - ret.put("end", activity.getEnd()); - ret.put("start", activity.getStart()); - ret.put("schedulerTime", activity.getSchedulerTime()); - ret.put("size", activity.getSize()); - ret.put("type", activity.getType().name()); - ret.put("name", activity.getName()); - ret.put("id", activity.getId()); - ret.put("serviceId", activity.getServiceId()); - ret.put("observationUrn", activity.getObservationUrn()); - ret.put("serviceName", activity.getServiceName()); - ret.put("serviceType", activity.getServiceType() == null ? null : activity.getServiceType().name()); - ret.put("dataflow", activity.getDataflow()); - ret.put("outcome", activity.getOutcome() == null ? null : activity.getOutcome().name()); - ret.put("stackTrace", activity.getStackTrace()); - } - default -> throw new KlabInternalErrorException( - "unexpected value for asParameters: " + asset.getClass().getCanonicalName()); - } + protected static int MAX_CACHED_OBSERVATIONS = 400; + + protected ContextScope scope; + + // protected LoadingCache assetCache = + // CacheBuilder.newBuilder().maximumSize(MAX_CACHED_OBSERVATIONS).build(new + // CacheLoader() { + // @Override + // public RuntimeAsset load(Long key) throws Exception { + // return retrieve(key, RuntimeAsset.class, scope); + // } + // }); + + /** + * Return a RuntimeAsset representing the overall dataflow related to the scope, so that it can be + * used for linking using the other CRUD methods. + * + * @return the dataflow root node, unique for the context. + * @throws org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException if the graph is + * not contextualized. + */ + protected abstract RuntimeAsset getDataflowNode(); + + protected abstract long nextKey(); + + /** + * Return a RuntimeAsset representing the overall provenance related to the scope, so that it can + * be used for linking using the other CRUD methods. 
+ * + * @return the dataflow root node, unique for the context. + * @throws org.integratedmodelling.klab.api.exceptions.KlabIllegalStateException if the graph is + * not contextualized. + */ + protected abstract RuntimeAsset getProvenanceNode(); + + /** + * Retrieve the asset with the passed key. + * + * @param key + * @param assetClass + * @param + * @return + */ + protected abstract T retrieve( + Object key, Class assetClass, Scope scope); + + /** + * Store the passed asset, return its unique long ID. + * + * @param asset + * @param additionalProperties any pair of properties we want overridden. Pass pairs and do it + * right or you'll get an exception. + * @return + */ + protected abstract long store(RuntimeAsset asset, Scope scope, Object... additionalProperties); + + /** + * Link the two passed assets. + * + * @param source + * @param destination + * @param additionalProperties any pair of properties we want overridden. Pass pairs and do it + * right or you'll get an exception. + */ + protected abstract void link( + RuntimeAsset source, + RuntimeAsset destination, + DigitalTwin.Relationship relationship, + Scope scope, + Object... additionalProperties); + + @Override + public T get(long id, Class resultClass) { + return retrieve(id, resultClass, scope); + } + + /** + * Define all properties for the passed asset. + * + * @param asset + * @param additionalParameters any pair of additional parameters to add + * @return + */ + protected Map asParameters(Object asset, Object... additionalParameters) { + Map ret = new HashMap<>(); + if (asset != null) { + switch (asset) { + case Observation observation -> { + ret.putAll(observation.getMetadata()); + ret.put( + "name", + observation.getName() == null + ? observation.getObservable().codeName() + : observation.getName()); + ret.put("updated", observation.getLastUpdate()); + ret.put("resolved", observation.isResolved()); + ret.put("type", observation.getType().name()); + ret.put("urn", observation.getUrn()); + ret.put( + "semantictype", + SemanticType.fundamentalType(observation.getObservable().getSemantics().getType()) + .name()); + ret.put("semantics", observation.getObservable().getSemantics().getUrn()); + ret.put("observable", observation.getObservable().getUrn()); + ret.put("id", observation.getId()); } - - if (additionalParameters != null) { - for (int i = 0; i < additionalParameters.length; i++) { - ret.put(additionalParameters[i].toString(), additionalParameters[++i]); - } + case Agent agent -> { + // TODO } + case ActuatorImpl actuator -> { + ret.put("observationId", actuator.getId()); + ret.put("id", actuator.getInternalId()); + StringBuilder code = new StringBuilder(); + for (var call : actuator.getComputation()) { + // TODO skip any recursive resolution calls and prepare for linking later + code.append(call.encode(Language.DEFAULT_EXPRESSION_LANGUAGE)).append("\n"); + } + ret.put("semantics", actuator.getObservable().getUrn()); + ret.put("computation", code.toString()); + ret.put("strategy", actuator.getStrategyUrn()); + } + case Activity activity -> { + ret.putAll(activity.getMetadata()); + ret.put("credits", activity.getCredits()); + ret.put("description", activity.getDescription()); + ret.put("end", activity.getEnd()); + ret.put("start", activity.getStart()); + ret.put("schedulerTime", activity.getSchedulerTime()); + ret.put("size", activity.getSize()); + ret.put("type", activity.getType().name()); + ret.put("name", activity.getName()); + ret.put("id", activity.getId()); + ret.put("serviceId", activity.getServiceId()); + 
ret.put("observationUrn", activity.getObservationUrn()); + ret.put("serviceName", activity.getServiceName()); + ret.put( + "serviceType", + activity.getServiceType() == null ? null : activity.getServiceType().name()); + ret.put("dataflow", activity.getDataflow()); + ret.put("outcome", activity.getOutcome() == null ? null : activity.getOutcome().name()); + ret.put("stackTrace", activity.getStackTrace()); + } + default -> + throw new KlabInternalErrorException( + "unexpected value for asParameters: " + asset.getClass().getCanonicalName()); + } + } - return Utils.Maps.removeNullValues(ret); + if (additionalParameters != null) { + for (int i = 0; i < additionalParameters.length; i++) { + ret.put(additionalParameters[i].toString(), additionalParameters[++i]); + } } + return Utils.Maps.removeNullValues(ret); + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JClient.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JClient.java index 5045ff95f..004915367 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JClient.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JClient.java @@ -18,36 +18,34 @@ public class KnowledgeGraphNeo4JClient extends KnowledgeGraphNeo4j implements KnowledgeGraph { - // TODO connect to a DB and run a driver - - - @Override - public KnowledgeGraph contextualize(ContextScope scope) { - return null; - } - - @Override - public T get(long id, Class resultClass) { - return null; - } - - @Override - public List get(RuntimeAsset source, DigitalTwin.Relationship linkType, Class resultClass) { - return List.of(); - } - - @Override - public KnowledgeGraph merge(URL remoteDigitalTwinURL) { - return null; - } - - @Override - public boolean isOnline() { - return false; - } - - @Override - public void shutdown() { - - } + // TODO connect to a DB and run a driver + + @Override + public KnowledgeGraph contextualize(ContextScope scope) { + return null; + } + + @Override + public T get(long id, Class resultClass) { + return null; + } + + @Override + public List get( + RuntimeAsset source, DigitalTwin.Relationship linkType, Class resultClass) { + return List.of(); + } + + @Override + public KnowledgeGraph merge(URL remoteDigitalTwinURL) { + return null; + } + + @Override + public boolean isOnline() { + return false; + } + + @Override + public void shutdown() {} } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JEmbedded.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JEmbedded.java index 43d62f303..d8f8d7948 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JEmbedded.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JEmbedded.java @@ -1,5 +1,9 @@ package org.integratedmodelling.klab.services.runtime.neo4j; +import java.net.URL; +import java.nio.file.Path; +import java.time.Duration; +import java.util.List; import org.integratedmodelling.common.logging.Logging; import org.integratedmodelling.klab.api.data.KnowledgeGraph; import org.integratedmodelling.klab.api.data.RuntimeAsset; @@ -15,130 +19,127 @@ import org.neo4j.graphdb.GraphDatabaseService; import 
org.neo4j.io.ByteUnit; -import java.net.URL; -import java.nio.file.Path; -import java.time.Duration; -import java.util.List; - /** - * A local, embedded, persistent k.LAB-instrumented, configurable Neo4j database. To work with the f'ing - * community edition the database must be a singleton within the service, containing data for all contexts. + * A local, embedded, persistent k.LAB-instrumented, configurable Neo4j database. To work with the + * f'ing community edition the database must be a singleton within the service, containing data for + * all contexts. */ public class KnowledgeGraphNeo4JEmbedded extends KnowledgeGraphNeo4j implements KnowledgeGraph { - private static final String DEFAULT_DATABASE_NAME = "klab"; - private DatabaseManagementService managementService; - private GraphDatabaseService graphDb; - private boolean online = true; - - private KnowledgeGraphNeo4JEmbedded(KnowledgeGraphNeo4JEmbedded parent, ContextScope scope) { - this.managementService = parent.managementService; - this.graphDb = parent.graphDb; - this.online = parent.online; - this.scope = scope; - this.driver = parent.driver; - } - - /** - * @param directory + private static final String DEFAULT_DATABASE_NAME = "klab"; + private DatabaseManagementService managementService; + private GraphDatabaseService graphDb; + private boolean online = true; + + private KnowledgeGraphNeo4JEmbedded(KnowledgeGraphNeo4JEmbedded parent, ContextScope scope) { + this.managementService = parent.managementService; + this.graphDb = parent.graphDb; + this.online = parent.online; + this.scope = scope; + this.driver = parent.driver; + } + + /** + * @param directory + */ + public KnowledgeGraphNeo4JEmbedded(Path directory) { + + /* + * TODO tie the performance parameters to runtime configuration */ - public KnowledgeGraphNeo4JEmbedded(Path directory) { + try { + this.managementService = + new DatabaseManagementServiceBuilder(directory) + .setConfig(GraphDatabaseSettings.initial_default_database, DEFAULT_DATABASE_NAME) + .setConfig(GraphDatabaseSettings.pagecache_memory, ByteUnit.mebiBytes(512)) + .setConfig(GraphDatabaseSettings.transaction_timeout, Duration.ofSeconds(60)) + .setConfig(GraphDatabaseSettings.preallocate_logical_logs, true) + .setConfig(BoltConnector.enabled, true) // for the driver + .setConfig(HttpConnector.enabled, true) // for debugging (?) + .build(); - /* - * TODO tie the performance parameters to runtime configuration - */ - try { - this.managementService = new DatabaseManagementServiceBuilder(directory) - .setConfig(GraphDatabaseSettings.initial_default_database, DEFAULT_DATABASE_NAME) - .setConfig(GraphDatabaseSettings.pagecache_memory, ByteUnit.mebiBytes(512)) - .setConfig(GraphDatabaseSettings.transaction_timeout, Duration.ofSeconds(60)) - .setConfig(GraphDatabaseSettings.preallocate_logical_logs, true) - .setConfig(BoltConnector.enabled, true) // for the driver - .setConfig(HttpConnector.enabled, true) // for debugging (?) 
- .build(); + this.graphDb = managementService.database(DEFAULT_DATABASE_NAME); - this.graphDb = managementService.database(DEFAULT_DATABASE_NAME); + // TODO this could just reimplement query() to use the DB directly and not expose the + // connectors, losing debugging access outside the application + this.driver = GraphDatabase.driver("bolt://localhost:7687"); - // TODO this could just reimplement query() to use the DB directly and not expose the - // connectors, losing debugging access outside the application - this.driver = GraphDatabase.driver("bolt://localhost:7687"); + this.driver.verifyConnectivity(); - this.driver.verifyConnectivity(); + configureDatabase(); - configureDatabase(); + Logging.INSTANCE.info("Embedded Neo4J database initialized"); - Logging.INSTANCE.info("Embedded Neo4J database initialized"); - - Runtime.getRuntime().addShutdownHook(new Thread() { + Runtime.getRuntime() + .addShutdownHook( + new Thread() { @Override public void run() { - managementService.shutdown(); + managementService.shutdown(); } - }); - - } catch (Throwable t) { - Logging.INSTANCE.error("Error initializing Neo4J embedded database", t); - this.online = false; - } - } - - private void configureDatabase() { - - // TODO all the needed indices + }); - // IndexDefinition usernamesIndex; - // try ( Transaction tx = graphDb.beginTx() ) - // { - // Schema schema = tx.schema(); - // usernamesIndex = schema.indexFor(Label.label( "User" ) ) - // .on( "username" ) - // .withName( "usernames" ) - // .create(); - // tx.commit(); - // } + } catch (Throwable t) { + Logging.INSTANCE.error("Error initializing Neo4J embedded database", t); + this.online = false; } + } - @Override - public KnowledgeGraph contextualize(ContextScope scope) { + private void configureDatabase() { - if (this.scope != null) { + // TODO all the needed indices - // idempotence - if (this.scope.getId().equals(scope.getId())) { - return this; - } + // IndexDefinition usernamesIndex; + // try ( Transaction tx = graphDb.beginTx() ) + // { + // Schema schema = tx.schema(); + // usernamesIndex = schema.indexFor(Label.label( "User" ) ) + // .on( "username" ) + // .withName( "usernames" ) + // .create(); + // tx.commit(); + // } + } - throw new KlabIllegalStateException("cannot recontextualize a previously contextualized graph " + - "database"); - } + @Override + public KnowledgeGraph contextualize(ContextScope scope) { - var ret = new KnowledgeGraphNeo4JEmbedded(this, scope); + if (this.scope != null) { - ret.initializeContext(); + // idempotence + if (this.scope.getId().equals(scope.getId())) { + return this; + } - return ret; + throw new KlabIllegalStateException( + "cannot recontextualize a previously contextualized graph " + "database"); } - @Override - public List get(RuntimeAsset source, DigitalTwin.Relationship linkType, - Class resultClass) { - return List.of(); - } + var ret = new KnowledgeGraphNeo4JEmbedded(this, scope); - @Override - public KnowledgeGraph merge(URL remoteDigitalTwinURL) { - return null; - } + ret.initializeContext(); - @Override - public boolean isOnline() { - return this.online; - } + return ret; + } + @Override + public List get( + RuntimeAsset source, DigitalTwin.Relationship linkType, Class resultClass) { + return List.of(); + } - @Override - public void shutdown() { - managementService.shutdown(); - } + @Override + public KnowledgeGraph merge(URL remoteDigitalTwinURL) { + return null; + } + + @Override + public boolean isOnline() { + return this.online; + } + @Override + public void shutdown() { + 
managementService.shutdown(); + } } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JRAM.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JRAM.java index c7ec86380..4a8d273a9 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JRAM.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4JRAM.java @@ -15,37 +15,34 @@ import java.net.URL; import java.util.List; -public class KnowledgeGraphNeo4JRAM extends KnowledgeGraphNeo4j implements KnowledgeGraph { - - @Override - public KnowledgeGraph contextualize(ContextScope scope) { - return null; - } - - @Override - public T get(long id, Class resultClass) { - return null; - } - - @Override - public List get(RuntimeAsset source, DigitalTwin.Relationship linkType, Class resultClass) { - return List.of(); - } - - @Override - public KnowledgeGraph merge(URL remoteDigitalTwinURL) { - return null; - } - - @Override - public boolean isOnline() { - return false; - } - - - @Override - public void shutdown() { - - } - +public class KnowledgeGraphNeo4JRAM extends KnowledgeGraphNeo4j implements KnowledgeGraph { + + @Override + public KnowledgeGraph contextualize(ContextScope scope) { + return null; + } + + @Override + public T get(long id, Class resultClass) { + return null; + } + + @Override + public List get( + RuntimeAsset source, DigitalTwin.Relationship linkType, Class resultClass) { + return List.of(); + } + + @Override + public KnowledgeGraph merge(URL remoteDigitalTwinURL) { + return null; + } + + @Override + public boolean isOnline() { + return false; + } + + @Override + public void shutdown() {} } diff --git a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4j.java b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4j.java index 4a729f330..dfcc262b6 100644 --- a/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4j.java +++ b/klab.services.runtime/src/main/java/org/integratedmodelling/klab/services/runtime/neo4j/KnowledgeGraphNeo4j.java @@ -42,1186 +42,1288 @@ import java.util.*; /** - * TODO check spatial queries: https://www.lyonwj.com/blog/neo4j-spatial-procedures-congressional-boundaries - * and https://neo4j-contrib.github.io/spatial/0.24-neo4j-3.1/index.html - *

- * TODO must figure out where the heck the neo4j-spatial-5.20.0.jar is (no, it's not in
- * https://github.com/neo4j-contrib/m2 nor in osgeo)
+ * TODO check spatial queries:
+ * https://www.lyonwj.com/blog/neo4j-spatial-procedures-congressional-boundaries and
+ * https://neo4j-contrib.github.io/spatial/0.24-neo4j-3.1/index.html
+ *
+ *

TODO must figure out where the heck the neo4j-spatial-5.20.0.jar is (no, it's not in + * https://github.com/neo4j-contrib/m2 nor in osgeo) */ public abstract class KnowledgeGraphNeo4j extends AbstractKnowledgeGraph { - protected Driver driver; - protected Agent user; - protected Agent klab; - protected String rootContextId; - private RuntimeAsset contextNode; - private RuntimeAsset dataflowNode; - private RuntimeAsset provenanceNode; - - // all predefined Cypher queries - interface Queries { - - String REMOVE_CONTEXT = "match (n:Context {id: $contextId})-[*]->(c) detach delete n, c"; - String FIND_CONTEXT = "MATCH (ctx:Context {id: $contextId}) RETURN ctx"; - String CREATE_WITH_PROPERTIES = "CREATE (n:{type}) SET n = $properties RETURN n"; - String UPDATE_PROPERTIES = "MATCH (n:{type} {id: $id}) SET n += $properties RETURN n"; - String[] INITIALIZATION_QUERIES = new String[]{ - "MERGE (user:Agent {name: $username, type: 'USER'})", - "MERGE (klab:Agent {name: 'k.LAB', type: 'AI'})", - "MATCH (klab:Agent {name: 'k.LAB'}), (user:Agent {name: $username}) CREATE // main context " + - "node\n" - + "\t(ctx:Context {id: $contextId, name: $name, user: $username, created: " + - "$timestamp, " + - "expiration: $expirationType}),\n" - + "\t// main provenance and dataflow nodes\n" - + "\t(prov:Provenance {name: 'Provenance', id: $contextId + '.PROVENANCE'}), " + - "(df:Dataflow " + - "{name: 'Dataflow', id: $contextId + '.DATAFLOW'}),\n" - + "\t(ctx)-[:HAS_PROVENANCE]->(prov),\n" - + "\t(ctx)-[:HAS_DATAFLOW]->(df),\n" - + "\t(prov)-[:HAS_AGENT]->(user),\n" - + "\t(prov)-[:HAS_AGENT]->(klab),\n" - + "\t// ACTIVITY that created the whole thing\n" - + "\t(creation:Activity {start: $timestamp, end: $timestamp, name: " + - "'INITIALIZATION', id: $activityId}),\n" - + "\t// created by user\n" - + "\t(creation)-[:BY_AGENT]->(user),\n" - + "\t(ctx)<-[:CREATED]-(creation),\n" - + "(prov)-[:HAS_CHILD]->(creation)"}; - String GET_AGENT_BY_NAME = "match (ctx:Context {id: $contextId})-->(prov:Provenance)-[:HAS_AGENT]->" + - "(a:Agent {name: $agentName}) RETURN a"; - } - - /** - * A provenance-linked "transaction" that can be committed or rolled back by reporting failure or success. - * The related activity becomes part of the graph in any case and success/failure is recorded with it. - * Everything else stored or linked is rolled back in case of failure. - */ - public class OperationImpl implements Operation { - - private ActivityImpl activity; - private Agent agent; - private Transaction transaction; - private Scope.Status outcome; - private Throwable exception; - private Object[] assets; - private OperationImpl parent; - private List children = new ArrayList<>(); - private Actuator actuator; - - @Override - public Agent getAgent() { - return this.agent; - } - - @Override - public Activity getActivity() { - return this.activity; - } - - @Override - public Operation createChild(Object... 
activityData) { + protected Driver driver; + protected Agent user; + protected Agent klab; + protected String rootContextId; + private RuntimeAsset contextNode; + private RuntimeAsset dataflowNode; + private RuntimeAsset provenanceNode; + + // all predefined Cypher queries + interface Queries { + + String REMOVE_CONTEXT = "match (n:Context {id: $contextId})-[*]->(c) detach delete n, c"; + String FIND_CONTEXT = "MATCH (ctx:Context {id: $contextId}) RETURN ctx"; + String CREATE_WITH_PROPERTIES = "CREATE (n:{type}) SET n = $properties RETURN n"; + String UPDATE_PROPERTIES = "MATCH (n:{type} {id: $id}) SET n += $properties RETURN n"; + String[] INITIALIZATION_QUERIES = + new String[] { + "MERGE (user:Agent {name: $username, type: 'USER'})", + "MERGE (klab:Agent {name: 'k.LAB', type: 'AI'})", + "MATCH (klab:Agent {name: 'k.LAB'}), (user:Agent {name: $username}) CREATE // main context " + + "node\n" + + "\t(ctx:Context {id: $contextId, name: $name, user: $username, created: " + + "$timestamp, " + + "expiration: $expirationType}),\n" + + "\t// main provenance and dataflow nodes\n" + + "\t(prov:Provenance {name: 'Provenance', id: $contextId + '.PROVENANCE'}), " + + "(df:Dataflow " + + "{name: 'Dataflow', id: $contextId + '.DATAFLOW'}),\n" + + "\t(ctx)-[:HAS_PROVENANCE]->(prov),\n" + + "\t(ctx)-[:HAS_DATAFLOW]->(df),\n" + + "\t(prov)-[:HAS_AGENT]->(user),\n" + + "\t(prov)-[:HAS_AGENT]->(klab),\n" + + "\t// ACTIVITY that created the whole thing\n" + + "\t(creation:Activity {start: $timestamp, end: $timestamp, name: " + + "'INITIALIZATION', id: $activityId}),\n" + + "\t// created by user\n" + + "\t(creation)-[:BY_AGENT]->(user),\n" + + "\t(ctx)<-[:CREATED]-(creation),\n" + + "(prov)-[:HAS_CHILD]->(creation)" + }; + String GET_AGENT_BY_NAME = + "match (ctx:Context {id: $contextId})-->(prov:Provenance)-[:HAS_AGENT]->" + + "(a:Agent {name: $agentName}) RETURN a"; + } + + /** + * A provenance-linked "transaction" that can be committed or rolled back by reporting failure or + * success. The related activity becomes part of the graph in any case and success/failure is + * recorded with it. Everything else stored or linked is rolled back in case of failure. + */ + public class OperationImpl implements Operation { + + private ActivityImpl activity; + private Agent agent; + private Transaction transaction; + private Scope.Status outcome; + private Throwable exception; + private Object[] assets; + private OperationImpl parent; + private List children = new ArrayList<>(); + private Actuator actuator; - var activity = new ActivityImpl(); - activity.setStart(System.currentTimeMillis()); + @Override + public Agent getAgent() { + return this.agent; + } - var ret = new OperationImpl(); + @Override + public Activity getActivity() { + return this.activity; + } + @Override + public Operation createChild(Object... 
activityData) { + + var activity = new ActivityImpl(); + activity.setStart(System.currentTimeMillis()); + + var ret = new OperationImpl(); + + ret.agent = agent; + ret.transaction = transaction; + ret.parent = this; + + if (activityData != null) { + for (Object o : activityData) { + if (o instanceof ActivityImpl a) { + activity = a; + } else if (o instanceof Activity.Type type) { + activity.setType(type); + } else if (o instanceof String description) { + activity.setDescription(description); + } else if (o instanceof Agent agent) { ret.agent = agent; - ret.transaction = transaction; - ret.parent = this; - - if (activityData != null) { - for (Object o : activityData) { - if (o instanceof ActivityImpl a) { - activity = a; - } else if (o instanceof Activity.Type type) { - activity.setType(type); - } else if (o instanceof String description) { - activity.setDescription(description); - } else if (o instanceof Agent agent) { - ret.agent = agent; - } else if (o instanceof ActuatorImpl actuator) { - ret.actuator = actuator; - } - } - } - - store(activity); - link(this.activity, activity, DigitalTwin.Relationship.TRIGGERED); - link(activity, agent, DigitalTwin.Relationship.BY_AGENT); - - ret.activity = activity; - - this.children.add(ret); - - return ret; + } else if (o instanceof ActuatorImpl actuator) { + ret.actuator = actuator; + } } + } - @Override - public long store(RuntimeAsset asset, Object... additionalProperties) { - return KnowledgeGraphNeo4j.this.store(transaction, asset, scope, additionalProperties); - } + store(activity); + link(this.activity, activity, DigitalTwin.Relationship.TRIGGERED); + link(activity, agent, DigitalTwin.Relationship.BY_AGENT); - @Override - public void link(RuntimeAsset source, RuntimeAsset destination, - DigitalTwin.Relationship relationship, Object... additionalProperties) { - KnowledgeGraphNeo4j.this.link(transaction, source, destination, relationship, scope, - additionalProperties); - } + ret.activity = activity; - @Override - public void linkToRootNode(RuntimeAsset destination, - DigitalTwin.Relationship relationship, Object... additionalProperties) { - var rootNode = switch (destination) { - case Actuator ignored -> dataflowNode; - case Activity ignored -> provenanceNode; - case Observation ignored -> contextNode; - default -> throw new KlabIllegalStateException("Unexpected value: " + destination); - }; - KnowledgeGraphNeo4j.this.link(transaction, rootNode, destination, relationship, scope, - additionalProperties); - } - - @Override - public Operation success(ContextScope scope, Object... assets) { - this.outcome = Scope.Status.FINISHED; - // updates as needed (activity end, observation resolved if type == resolution, context timestamp - this.assets = assets; - return this; - } + this.children.add(ret); - @Override - public Operation fail(ContextScope scope, Object... assets) { - // rollback; update activity end and context timestamp only, if we have an error or throwable - // update activity - this.outcome = Scope.Status.ABORTED; - this.assets = assets; - return this; - } + return ret; + } - @Override - public void close() throws IOException { - - List childActuators = new ArrayList<>(); - - for (var child : children) { - child.close(); - if (child.actuator != null) { - childActuators.add(child.actuator); - } - } - - this.activity.setEnd(System.currentTimeMillis()); - this.activity.setOutcome(outcome == null ? Activity.Outcome.INTERNAL_FAILURE : - (outcome == Scope.Status.FINISHED ? 
Activity.Outcome.SUCCESS : - Activity.Outcome.FAILURE)); - - // commit or rollback based on status after success() or fail(). If none has been - // called, status is null and this is an internal error, logged with the activity - - ObservationImpl observation = null; - double coverage = 1.0; - var resolutionEmpty = false; - - if (assets != null) { - for (var asset : assets) { - if (asset instanceof ObservationImpl obs) { - observation = obs; - activity.setObservationUrn(obs.getUrn()); - } else if (asset instanceof Throwable t) { - activity.setStackTrace(ExceptionUtils.getStackTrace(t)); - } else if (asset instanceof Dataflow dataflow) { - resolutionEmpty = dataflow.isEmpty(); - } - } - } - - if (resolutionEmpty && activity.getType() == Activity.Type.RESOLUTION) { - activity.setOutcome(Activity.Outcome.FAILURE); - activity.setDescription("Resolution of " + observation + " failed"); - } - - if (this.actuator != null) { - store(actuator); - link(this.activity, this.actuator, DigitalTwin.Relationship.HAS_PLAN); - for (Actuator childActuator : childActuators) { - link(this.actuator, childActuator, DigitalTwin.Relationship.HAS_CHILD); - } - } else { - for (Actuator childActuator : childActuators) { - link(dataflowNode, childActuator, DigitalTwin.Relationship.HAS_CHILD); - } - } - - if (observation != null && this.actuator != null && outcome == Scope.Status.FINISHED) { - // TODO add state and histogram - link(this.activity, observation, DigitalTwin.Relationship.CONTEXTUALIZED); - } - - if (parent == null) { - - if (outcome == null) { - // Log an internal failure (no success or failure, should not happen) - Logging.INSTANCE.error("Internal error: activity did not properly finish: " + activity); - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ActivityAborted, activity); - transaction.rollback(); - } else if (outcome == Scope.Status.FINISHED) { - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ActivityFinished, activity); - transaction.commit(); - } else if (outcome == Scope.Status.ABORTED) { - scope.send(Message.MessageClass.ObservationLifecycle, - Message.MessageType.ActivityAborted, activity); - transaction.rollback(); - } - - // npw that transactions are done, update all observations and activities w.r.t. the ones - // contained here - updateAssets(); - } + @Override + public long store(RuntimeAsset asset, Object... additionalProperties) { + return KnowledgeGraphNeo4j.this.store(transaction, asset, scope, additionalProperties); + } - } + @Override + public void link( + RuntimeAsset source, + RuntimeAsset destination, + DigitalTwin.Relationship relationship, + Object... 
additionalProperties) { + KnowledgeGraphNeo4j.this.link( + transaction, source, destination, relationship, scope, additionalProperties); + } - private void updateAssets() { - - for (var child : children) { - child.updateAssets(); - } - - ObservationImpl observation = null; - double coverage = 1.0; - if (assets != null) { - for (var asset : assets) { - if (asset instanceof ObservationImpl obs) { - observation = obs; - } else if (asset instanceof Double d) { - coverage = d; - } else if (asset instanceof Long l) { - this.activity.setCredits(l); - } else if (asset instanceof Throwable throwable) { - this.activity.setStackTrace(ExceptionUtils.getStackTrace(throwable)); - } - } - } - - if (observation != null && outcome == Scope.Status.FINISHED) { - if (this.activity.getType() == Activity.Type.CONTEXTUALIZATION) { - observation.setResolved(true); - observation.setResolvedCoverage(coverage); - } - update(observation, scope); - if (observation.getGeometry() != null) { - storeGeometry(observation.getGeometry(), observation); - } - } - - update(this.activity, scope); + @Override + public void linkToRootNode( + RuntimeAsset destination, + DigitalTwin.Relationship relationship, + Object... additionalProperties) { + var rootNode = + switch (destination) { + case Actuator ignored -> dataflowNode; + case Activity ignored -> provenanceNode; + case Observation ignored -> contextNode; + default -> throw new KlabIllegalStateException("Unexpected value: " + destination); + }; + KnowledgeGraphNeo4j.this.link( + transaction, rootNode, destination, relationship, scope, additionalProperties); + } - } + @Override + public Operation success(ContextScope scope, Object... assets) { + this.outcome = Scope.Status.FINISHED; + // updates as needed (activity end, observation resolved if type == resolution, context + // timestamp + this.assets = assets; + return this; } + @Override + public Operation fail(ContextScope scope, Object... assets) { + // rollback; update activity end and context timestamp only, if we have an error or throwable + // update activity + this.outcome = Scope.Status.ABORTED; + this.assets = assets; + return this; + } @Override - public Operation operation(Agent agent, Activity parentActivity, Activity.Type activityType, - Object... data) { + public void close() throws IOException { - // validate arguments and complain loudly if anything is missing. Must have agent and activity - if (agent == null) { - throw new KlabInternalErrorException("Knowledge graph operation: agent is null"); - } + List childActuators = new ArrayList<>(); - // create and commit the activity record as a node, possibly linked to a parent - // activity. 
- - // open the transaction for the remaining operations - - var activity = new ActivityImpl(); - activity.setType(activityType); - activity.setStart(System.currentTimeMillis()); - activity.setName(activityType.name()); - - var ret = new OperationImpl(); - - ret.activity = activity; - ret.agent = agent; - - // select arguments and put them where they belong - if (data != null) { - for (var dat : data) { - if (dat instanceof String description) { - ret.activity.setDescription(description); - } else if (dat instanceof OperationImpl operation) { - ret.parent = operation; - } else if (dat instanceof KlabService service) { - activity.setServiceId(service.serviceId()); - activity.setServiceName(service.getServiceName()); - activity.setServiceType(KlabService.Type.classify(service)); - } else if (dat instanceof Dataflow dataflow) { - activity.setDataflow(new DataflowEncoder(dataflow, scope).toString()); - } - } + for (var child : children) { + child.close(); + if (child.actuator != null) { + childActuators.add(child.actuator); } - - KnowledgeGraphNeo4j.this.store(activity, scope); - KnowledgeGraphNeo4j.this.link(activity, agent, DigitalTwin.Relationship.BY_AGENT, scope); - if (parentActivity != null) { - KnowledgeGraphNeo4j.this.link(parentActivity, activity, DigitalTwin.Relationship.TRIGGERED, - scope); - } else { - KnowledgeGraphNeo4j.this.link(provenanceNode, activity, DigitalTwin.Relationship.HAS_CHILD, - scope); + } + + this.activity.setEnd(System.currentTimeMillis()); + this.activity.setOutcome( + outcome == null + ? Activity.Outcome.INTERNAL_FAILURE + : (outcome == Scope.Status.FINISHED + ? Activity.Outcome.SUCCESS + : Activity.Outcome.FAILURE)); + + // commit or rollback based on status after success() or fail(). If none has been + // called, status is null and this is an internal error, logged with the activity + + ObservationImpl observation = null; + double coverage = 1.0; + var resolutionEmpty = false; + + if (assets != null) { + for (var asset : assets) { + if (asset instanceof ObservationImpl obs) { + observation = obs; + activity.setObservationUrn(obs.getUrn()); + } else if (asset instanceof Throwable t) { + activity.setStackTrace(ExceptionUtils.getStackTrace(t)); + } else if (asset instanceof Dataflow dataflow) { + resolutionEmpty = dataflow.isEmpty(); + } + } + } + + if (resolutionEmpty && activity.getType() == Activity.Type.RESOLUTION) { + activity.setOutcome(Activity.Outcome.FAILURE); + activity.setDescription("Resolution of " + observation + " failed"); + } + + if (this.actuator != null) { + store(actuator); + link(this.activity, this.actuator, DigitalTwin.Relationship.HAS_PLAN); + for (Actuator childActuator : childActuators) { + link(this.actuator, childActuator, DigitalTwin.Relationship.HAS_CHILD); + } + } else { + for (Actuator childActuator : childActuators) { + link(dataflowNode, childActuator, DigitalTwin.Relationship.HAS_CHILD); + } + } + + if (observation != null && this.actuator != null && outcome == Scope.Status.FINISHED) { + // TODO add state and histogram + link(this.activity, observation, DigitalTwin.Relationship.CONTEXTUALIZED); + } + + if (parent == null) { + + if (outcome == null) { + // Log an internal failure (no success or failure, should not happen) + Logging.INSTANCE.error("Internal error: activity did not properly finish: " + activity); + scope.send( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ActivityAborted, + activity); + transaction.rollback(); + } else if (outcome == Scope.Status.FINISHED) { + scope.send( + 
Message.MessageClass.ObservationLifecycle, + Message.MessageType.ActivityFinished, + activity); + transaction.commit(); + } else if (outcome == Scope.Status.ABORTED) { + scope.send( + Message.MessageClass.ObservationLifecycle, + Message.MessageType.ActivityAborted, + activity); + transaction.rollback(); } - scope.send(Message.MessageClass.ObservationLifecycle, Message.MessageType.ActivityStarted, - ret.activity); - - // open transaction if we are the root operation. We only commit within it. - ret.transaction = ret.parent == null ? - // this open a new session per transaction. Probably expensive but safe as - // transactions can't co-occur within a session. - driver.session().beginTransaction(TransactionConfig.builder().withTimeout(Duration.ZERO).build()) : - ret.parent.transaction; - - return ret; + // npw that transactions are done, update all observations and activities w.r.t. the ones + // contained here + updateAssets(); + } } - protected EagerResult query(String query, Map parameters, Scope scope) { - if (isOnline()) { - try { - // System.out.printf("\nQUERY " + query + "\n WITH " + parameters); - return driver.executableQuery(query).withParameters(parameters).execute(); - } catch (Throwable t) { - if (scope != null) { - scope.error(t.getMessage(), t); - } else { - Logging.INSTANCE.error(t); - } - } + private void updateAssets() { + + for (var child : children) { + child.updateAssets(); + } + + ObservationImpl observation = null; + double coverage = 1.0; + if (assets != null) { + for (var asset : assets) { + if (asset instanceof ObservationImpl obs) { + observation = obs; + } else if (asset instanceof Double d) { + coverage = d; + } else if (asset instanceof Long l) { + this.activity.setCredits(l); + } else if (asset instanceof Throwable throwable) { + this.activity.setStackTrace(ExceptionUtils.getStackTrace(throwable)); + } } - return null; - } + } - protected Result query(Transaction transaction, String query, Map parameters, - Scope scope) { - if (isOnline()) { - try { - return transaction.run(query, parameters); - } catch (Throwable t) { - if (scope != null) { - scope.error(t.getMessage(), t); - } else { - Logging.INSTANCE.error(t); - } - } + if (observation != null && outcome == Scope.Status.FINISHED) { + if (this.activity.getType() == Activity.Type.CONTEXTUALIZATION) { + observation.setResolved(true); + observation.setResolvedCoverage(coverage); } - return null; - } - - /** - * Ensure things are OK re: main agents and the like. 
Must be called only once - */ - protected void initializeContext() { - - this.rootContextId = scope.getId(); - - var result = query(Queries.FIND_CONTEXT, Map.of("contextId", scope.getId()), scope); - - if (result.records().isEmpty()) { - long timestamp = System.currentTimeMillis(); - var activityId = nextKey(); - for (var query : Queries.INITIALIZATION_QUERIES) { - query(query, Map.of( - "contextId", scope.getId(), - "name", scope.getName(), - "timestamp", timestamp, - "username", scope.getUser().getUsername(), - "expirationType", scope.getPersistence().name(), - "activityId", activityId), - scope); - } - + update(observation, scope); + if (observation.getGeometry() != null) { + storeGeometry(observation.getGeometry(), observation); } + } - final var dataflowNodeId = nextKey(); - final var provenanceNodeId = nextKey(); - final var contextNodeId = nextKey(); - - this.dataflowNode = new RuntimeAsset() { - @Override - public long getId() { - return dataflowNodeId; - } - - @Override - public Type classify() { - // check - scope isn't a runtime asset - return Type.DATAFLOW; - } - }; - - this.provenanceNode = new RuntimeAsset() { - @Override - public long getId() { - return provenanceNodeId; - } - - @Override - public Type classify() { - // check - scope isn't a runtime asset - return Type.PROVENANCE; - } - }; - - this.contextNode = new RuntimeAsset() { - @Override - public long getId() { - return contextNodeId; - } - - @Override - public Type classify() { - // as a marker - scope isn't a runtime asset - return Type.ARTIFACT; - } - }; - - this.user = adapt( - query( - Queries.GET_AGENT_BY_NAME, - Map.of("contextId", scope.getId(), "agentName", scope.getUser().getUsername()), - scope), - Agent.class, scope).getFirst(); - this.klab = adapt(query( - Queries.GET_AGENT_BY_NAME, - Map.of("contextId", scope.getId(), "agentName", "k.LAB"), scope), Agent.class, scope).getFirst(); + update(this.activity, scope); } + } - @Override - public void deleteContext() { - query(Queries.REMOVE_CONTEXT, Map.of("contextId", scope.getId()), scope); + @Override + public Operation operation( + Agent agent, Activity parentActivity, Activity.Type activityType, Object... data) { + + // validate arguments and complain loudly if anything is missing. 
Must have a non-null agent and an activity type.
+    if (agent == null) {
+      throw new KlabInternalErrorException("Knowledge graph operation: agent is null");
+    }
+
+    // create and commit the activity record as a node, possibly linked to a parent
+    // activity.
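+    // Illustrative usage sketch (hypothetical caller-side code, not part of this change): the
+    // returned Operation is normally closed via try-with-resources after reporting an outcome,
+    // e.g.
+    //
+    //   try (var op = knowledgeGraph.operation(agent, null, Activity.Type.RESOLUTION, "resolve")) {
+    //     op.store(observation);
+    //     op.linkToRootNode(observation, DigitalTwin.Relationship.HAS_CHILD);
+    //     op.success(contextScope, observation); // or op.fail(contextScope, throwable)
+    //   }
+    //
+    // close() then finalizes the activity and commits, or rolls back when fail() was called or no
+    // outcome was reported. Variable names above (knowledgeGraph, contextScope, observation) are
+    // placeholders for the caller's own objects.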
+
+    // open the transaction for the remaining operations
+
+    var activity = new ActivityImpl();
+    activity.setType(activityType);
+    activity.setStart(System.currentTimeMillis());
+    activity.setName(activityType.name());
+
+    var ret = new OperationImpl();
+
+    ret.activity = activity;
+    ret.agent = agent;
+
+    // select arguments and put them where they belong
+    if (data != null) {
+      for (var dat : data) {
+        if (dat instanceof String description) {
+          ret.activity.setDescription(description);
+        } else if (dat instanceof OperationImpl operation) {
+          ret.parent = operation;
+        } else if (dat instanceof KlabService service) {
+          activity.setServiceId(service.serviceId());
+          activity.setServiceName(service.getServiceName());
+          activity.setServiceType(KlabService.Type.classify(service));
+        } else if (dat instanceof Dataflow dataflow) {
+          activity.setDataflow(new DataflowEncoder(dataflow, scope).toString());
+        }
+      }
+    }
+
+    KnowledgeGraphNeo4j.this.store(activity, scope);
+    KnowledgeGraphNeo4j.this.link(activity, agent, DigitalTwin.Relationship.BY_AGENT, scope);
+    if (parentActivity != null) {
+      KnowledgeGraphNeo4j.this.link(
+          parentActivity, activity, DigitalTwin.Relationship.TRIGGERED, scope);
+    } else {
+      KnowledgeGraphNeo4j.this.link(
+          provenanceNode, activity, DigitalTwin.Relationship.HAS_CHILD, scope);
+    }
+
+    scope.send(
+        Message.MessageClass.ObservationLifecycle,
+        Message.MessageType.ActivityStarted,
+        ret.activity);
+
+    // open transaction if we are the root operation. We only commit within it.
+    ret.transaction =
+        ret.parent == null
+            ?
+            // this opens a new session per transaction. Probably expensive but safe, as
+            // transactions can't co-occur within a session.
+ driver + .session() + .beginTransaction(TransactionConfig.builder().withTimeout(Duration.ZERO).build()) + : ret.parent.transaction; + + return ret; + } + + protected EagerResult query(String query, Map parameters, Scope scope) { + if (isOnline()) { + try { + // System.out.printf("\nQUERY " + query + "\n WITH " + parameters); + return driver.executableQuery(query).withParameters(parameters).execute(); + } catch (Throwable t) { + if (scope != null) { + scope.error(t.getMessage(), t); + } else { + Logging.INSTANCE.error(t); } - return ret; + } } - - @Override - public void clear() { - if (scope == null) { - driver.executableQuery("MATCH (n) DETACH DELETE n").execute(); + return null; + } + + protected Result query( + Transaction transaction, String query, Map parameters, Scope scope) { + if (isOnline()) { + try { + return transaction.run(query, parameters); + } catch (Throwable t) { + if (scope != null) { + scope.error(t.getMessage(), t); } else { - query(Queries.REMOVE_CONTEXT, Map.of("contextId", scope.getId()), scope); + Logging.INSTANCE.error(t); } + } } - - @Override - protected T retrieve(Object key, Class assetClass, Scope scope) { - var result = - assetClass == RuntimeAsset.class ? - query("MATCH (n {id: $id}) return n", Map.of("id", key), null) : - query("MATCH (n:{assetLabel} {id: $id}) return n".replace("{assetLabel}", - getLabel(assetClass)), Map.of("id", key), null); - var adapted = adapt(result, assetClass, scope); - return adapted.isEmpty() ? null : adapted.getFirst(); + return null; + } + + /** Ensure things are OK re: main agents and the like. Must be called only once */ + protected void initializeContext() { + + this.rootContextId = scope.getId(); + + var result = query(Queries.FIND_CONTEXT, Map.of("contextId", scope.getId()), scope); + + if (result.records().isEmpty()) { + long timestamp = System.currentTimeMillis(); + var activityId = nextKey(); + for (var query : Queries.INITIALIZATION_QUERIES) { + query( + query, + Map.of( + "contextId", scope.getId(), + "name", scope.getName(), + "timestamp", timestamp, + "username", scope.getUser().getUsername(), + "expirationType", scope.getPersistence().name(), + "activityId", activityId), + scope); + } } + final var dataflowNodeId = nextKey(); + final var provenanceNodeId = nextKey(); + final var contextNodeId = nextKey(); + + this.dataflowNode = + new RuntimeAsset() { + @Override + public long getId() { + return dataflowNodeId; + } + + @Override + public Type classify() { + // check - scope isn't a runtime asset + return Type.DATAFLOW; + } + }; - @Override - protected long store(RuntimeAsset asset, Scope scope, Object... 
additionalProperties) { - - var type = getLabel(asset); - var props = asParameters(asset, additionalProperties); - var ret = nextKey(); - props.put("id", ret); - var result = query( - Queries.CREATE_WITH_PROPERTIES.replace("{type}", type), - Map.of("properties", props), scope); - if (result != null && result.records().size() == 1) { - setId(asset, ret); - } + this.provenanceNode = + new RuntimeAsset() { + @Override + public long getId() { + return provenanceNodeId; + } + + @Override + public Type classify() { + // check - scope isn't a runtime asset + return Type.PROVENANCE; + } + }; - return ret; - } + this.contextNode = + new RuntimeAsset() { + @Override + public long getId() { + return contextNodeId; + } + + @Override + public Type classify() { + // as a marker - scope isn't a runtime asset + return Type.ARTIFACT; + } + }; - protected long store(Transaction transaction, RuntimeAsset asset, Scope scope, - Object... additionalProperties) { - - var type = getLabel(asset); - var props = asParameters(asset, additionalProperties); - var ret = nextKey(); - props.put("id", ret); - var result = query(transaction, - Queries.CREATE_WITH_PROPERTIES.replace("{type}", type), - Map.of("properties", props), scope); - if (result != null && result.hasNext()) { - - setId(asset, ret); - var geometry = switch (asset) { - case Observation observation -> observation.getGeometry(); - case Actuator actuator -> actuator.getCoverage(); - default -> null; - }; - - if (geometry != null) { - storeGeometry(geometry, asset); - } + this.user = + adapt( + query( + Queries.GET_AGENT_BY_NAME, + Map.of("contextId", scope.getId(), "agentName", scope.getUser().getUsername()), + scope), + Agent.class, + scope) + .getFirst(); + this.klab = + adapt( + query( + Queries.GET_AGENT_BY_NAME, + Map.of("contextId", scope.getId(), "agentName", "k.LAB"), + scope), + Agent.class, + scope) + .getFirst(); + } + + @Override + public void deleteContext() { + query(Queries.REMOVE_CONTEXT, Map.of("contextId", scope.getId()), scope); + } + + /** + * @param query + * @param cls + * @param + * @return + */ + protected List adapt(EagerResult query, Class cls, Scope scope) { + + List ret = new ArrayList<>(); + + for (var record : query.records()) { + + Value node = null; + Map properties = new HashMap<>(); + if (!record.values().isEmpty()) { + // must be one field for the node + node = record.values().getFirst(); + } + + if (node == null) { + continue; + } + + if (Map.class.isAssignableFrom(cls)) { + + ret.add((T) node.asMap(Map.of())); + + } else if (Agent.class.isAssignableFrom(cls)) { + + var instance = new AgentImpl(); + instance.setName(node.get("name").asString()); + instance.setEmpty(false); + + ret.add((T) instance); + + } else if (Observation.class.isAssignableFrom(cls)) { + + var instance = new ObservationImpl(); + var reasoner = scope.getService(Reasoner.class); + + instance.setUrn(node.get("urn").asString()); + instance.setName(node.get("name").asString()); + instance.setObservable(reasoner.resolveObservable(node.get("observable").asString())); + instance.setResolved(node.get("resolved").asBoolean()); + instance.setId(node.get("id").asLong()); + + var gResult = + query( + "MATCH (o:Observation)-[:HAS_GEOMETRY]->(g:Geometry) WHERE o.id" + + " = $id RETURN g", + Map.of("id", node.get("id").asLong()), + scope); + + if (gResult == null || !gResult.records().isEmpty()) { + instance.setGeometry(adapt(gResult, Geometry.class, scope).getFirst()); } - return ret; + ret.add((T) instance); + + } else if (Activity.class.isAssignableFrom(cls)) { + 
var instance = new ActivityImpl(); + // TODO + instance.setStart(node.get("start").asLong()); + instance.setEnd(node.get("end").asLong()); + instance.setObservationUrn(node.get("observationUrn").asString()); + instance.setName(node.get("name").asString()); + instance.setServiceName( + node.get("serviceName").isNull() ? null : node.get("serviceName").asString()); + instance.setServiceId( + node.get("serviceId").isNull() ? null : node.get("serviceId").asString()); + instance.setServiceType( + node.get("serviceType").isNull() + ? null + : KlabService.Type.valueOf(node.get("serviceType").asString())); + instance.setDataflow( + node.get("dataflow").isNull() ? null : node.get("dataflow").asString()); + instance.setType(Activity.Type.valueOf(instance.getName())); + instance.setDescription( + node.get("description").isNull() + ? "No description" + : node.get("description").asString()); + instance.setId(node.get("id").asLong()); + ret.add((T) instance); + } else if (Actuator.class.isAssignableFrom(cls)) { + var instance = new ActuatorImpl(); + // TODO + ret.add((T) instance); + } else if (Plan.class.isAssignableFrom(cls)) { + var instance = new PlanImpl(); + // TODO + ret.add((T) instance); + } else if (Geometry.class.isAssignableFrom(cls)) { + // TODO use a cache storing scales + ret.add( + (T) GeometryRepository.INSTANCE.get(node.get("definition").asString(), Geometry.class)); + } + } + return ret; + } + + @Override + public Agent user() { + return user; + } + + @Override + public Agent klab() { + return klab; + } + + @Override + public List getExistingContexts(UserScope scope) { + + var ret = new ArrayList(); + var result = + scope == null + ? query( + "match (c:Context)<-[:CREATED]-(a:Activity) return c.id as contextId, a.start as " + + "startTime", + Map.of(), + scope) + : query( + "match (c:Context {user: $username})<-[:CREATED]-(a:Activity) return c" + + ".name as" + + " contextName, c.id as contextId, a.start as startTime", + Map.of("username", scope.getUser().getUsername()), + scope); + + for (var record : result.records()) { + ContextInfo info = new ContextInfo(); + info.setId(record.get("contextId").asString()); + info.setName(record.get("contextName").asString()); + info.setCreationTime(record.get("startTime").asLong()); + // TODO the rest + ret.add(info); + } + return ret; + } + + @Override + public void clear() { + if (scope == null) { + driver.executableQuery("MATCH (n) DETACH DELETE n").execute(); + } else { + query(Queries.REMOVE_CONTEXT, Map.of("contextId", scope.getId()), scope); + } + } + + @Override + protected T retrieve(Object key, Class assetClass, Scope scope) { + var result = + assetClass == RuntimeAsset.class + ? query("MATCH (n {id: $id}) return n", Map.of("id", key), null) + : query( + "MATCH (n:{assetLabel} {id: $id}) return n" + .replace("{assetLabel}", getLabel(assetClass)), + Map.of("id", key), + null); + var adapted = adapt(result, assetClass, scope); + return adapted.isEmpty() ? null : adapted.getFirst(); + } + + @Override + protected long store(RuntimeAsset asset, Scope scope, Object... 
additionalProperties) { + + var type = getLabel(asset); + var props = asParameters(asset, additionalProperties); + var ret = nextKey(); + props.put("id", ret); + var result = + query( + Queries.CREATE_WITH_PROPERTIES.replace("{type}", type), + Map.of("properties", props), + scope); + if (result != null && result.records().size() == 1) { + setId(asset, ret); } - private void storeGeometry(Geometry geometry, RuntimeAsset asset) { - - // TODO have a multi-cache ordered by size + return ret; + } + + protected long store( + Transaction transaction, RuntimeAsset asset, Scope scope, Object... additionalProperties) { + + var type = getLabel(asset); + var props = asParameters(asset, additionalProperties); + var ret = nextKey(); + props.put("id", ret); + var result = + query( + transaction, + Queries.CREATE_WITH_PROPERTIES.replace("{type}", type), + Map.of("properties", props), + scope); + if (result != null && result.hasNext()) { + + setId(asset, ret); + var geometry = + switch (asset) { + case Observation observation -> observation.getGeometry(); + case Actuator actuator -> actuator.getCoverage(); + default -> null; + }; - // Must be called after update() and this may happen more than once, so we must check to avoid - // multiple relationships. - var exists = - query("MATCH (n:{assetLabel} {id: $assetId})-[:HAS_GEOMETRY]->(g:Geometry) RETURN g".replace("{assetLabel}", - getLabel(asset)), Map.of("assetId", getId(asset)), scope); + if (geometry != null) { + storeGeometry(geometry, asset); + } + } - if (exists != null && !exists.records().isEmpty()) { - return; - } + return ret; + } - if (!(geometry instanceof Scale)) { - // only record fully specified scales, not syntactic specifications - geometry = Scale.create(geometry); - } + private void storeGeometry(Geometry geometry, RuntimeAsset asset) { - double coverage = geometry instanceof Coverage cov ? cov.getCoverage() : 1.0; - - // the idea is that looking up the size before the monster string can be faster. - var query = "MATCH (g:Geometry) WHERE g.size = $size AND g.definition = $definition RETURN g"; - long id; - var result = query(query, Map.of("size", geometry.size(), "definition", geometry.encode()), scope); - if (result == null || result.records().isEmpty()) { - id = nextKey(); - // TODO more geometry data (bounding box, time boundaries etc.) - query("CREATE (g:Geometry {size: $size, definition: $definition, id: $id}) RETURN g", Map.of( - "size", - geometry.size(), "definition", geometry.encode(), "id", id), scope); - } else { - id = result.records().getFirst().values().getFirst().get("id").asLong(); - } + // TODO have a multi-cache ordered by size - // TODO more properties pertaining to the link (e.g. separate space/time coverages etc) - var properties = Map.of("coverage", coverage); + // Must be called after update() and this may happen more than once, so we must check to avoid + // multiple relationships. 
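+    // For reference (illustrative expansion only, no added behavior): for an Observation asset
+    // the check below runs a query of the form
+    //   MATCH (n:Observation {id: $assetId})-[:HAS_GEOMETRY]->(g:Geometry) RETURN g
+    // so a repeated call for the same asset returns early instead of creating a duplicate
+    // HAS_GEOMETRY relationship.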
+ var exists = + query( + "MATCH (n:{assetLabel} {id: $assetId})-[:HAS_GEOMETRY]->(g:Geometry) RETURN g" + .replace("{assetLabel}", getLabel(asset)), + Map.of("assetId", getId(asset)), + scope); - // link it with the associated coverage - var rel = query(("MATCH (n:{assetLabel}), (g:Geometry) WHERE n.id = $assetId AND g.id = $geometryId" + - " CREATE (n)" + - "-[r:HAS_GEOMETRY]->(g) SET r = $properties RETURN r").replace("{assetLabel}", - getLabel(asset)), - Map.of("assetId", getId(asset), "geometryId", id, "properties", properties), scope); + if (exists != null && !exists.records().isEmpty()) { + return; } - @Override - protected void link(RuntimeAsset source, RuntimeAsset destination, - DigitalTwin.Relationship relationship, Scope scope, - Object... additionalProperties) { - - // find out if the internal ID or what stored ID should be used - var sourceQuery = matchAsset(source, "n", "sourceId"); - var targetQuery = matchAsset(destination, "c", "targetId"); - var props = asParameters(null, additionalProperties); - var query = ("match (n:{fromLabel}), (c:{toLabel}) WHERE {sourceQuery} AND {targetQuery} CREATE (n)" + - "-[r:{relationshipLabel}]->(c) SET r = $properties RETURN r") - .replace("{sourceQuery}", sourceQuery) - .replace("{targetQuery}", targetQuery) - .replace("{relationshipLabel}", relationship.name()) - .replace("{fromLabel}", getLabel(source)) - .replace("{toLabel}", getLabel(destination)); - - query(query, Map.of("sourceId", getId(source), "targetId", getId(destination), "properties", props) - , scope); + if (!(geometry instanceof Scale)) { + // only record fully specified scales, not syntactic specifications + geometry = Scale.create(geometry); } - protected void link(Transaction transaction, RuntimeAsset source, RuntimeAsset destination, - DigitalTwin.Relationship relationship, Scope scope, - Object... additionalProperties) { - - // find out if the internal ID or what stored ID should be used - var sourceQuery = matchAsset(source, "n", "sourceId"); - var targetQuery = matchAsset(destination, "c", "targetId"); - var props = asParameters(null, additionalProperties); - var query = ("MATCH (n:{fromLabel}), (c:{toLabel}) WHERE {sourceQuery} AND {targetQuery} CREATE (n)" + - "-[r:{relationshipLabel}]->(c) SET r = $properties RETURN r") - .replace("{sourceQuery}", sourceQuery) - .replace("{targetQuery}", targetQuery) - .replace("{relationshipLabel}", relationship.name()) - .replace("{fromLabel}", getLabel(source)) - .replace("{toLabel}", getLabel(destination)); - - query(transaction, query, Map.of("sourceId", getId(source), "targetId", getId(destination), - "properties", props) - , scope); + double coverage = geometry instanceof Coverage cov ? cov.getCoverage() : 1.0; + + // the idea is that looking up the size before the monster string can be faster. + var query = "MATCH (g:Geometry) WHERE g.size = $size AND g.definition = $definition RETURN g"; + long id; + var result = + query(query, Map.of("size", geometry.size(), "definition", geometry.encode()), scope); + if (result == null || result.records().isEmpty()) { + id = nextKey(); + // TODO more geometry data (bounding box, time boundaries etc.) 
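+      // Illustrative shape of the node created below (property values come from the geometry
+      // being stored; the figures shown are hypothetical):
+      //   (g:Geometry {size: 10000, definition: "<geometry.encode() output>", id: <nextKey()>})
+      // Later calls find this node again through the size + definition lookup above instead of
+      // re-creating it.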
+ query( + "CREATE (g:Geometry {size: $size, definition: $definition, id: $id}) RETURN g", + Map.of("size", geometry.size(), "definition", geometry.encode(), "id", id), + scope); + } else { + id = result.records().getFirst().values().getFirst().get("id").asLong(); } - private String matchAsset(RuntimeAsset asset, String name, String queryVariable) { - - var ret = switch (asset) { - case Activity activity -> name + ".id = $" + queryVariable; - case Observation observation -> name + ".id = $" + queryVariable; - case Actuator actuator -> name + ".id = $" + queryVariable; - case Agent agent -> name + ".name = $" + queryVariable; - default -> null; + // TODO more properties pertaining to the link (e.g. separate space/time coverages etc) + var properties = Map.of("coverage", coverage); + + // link it with the associated coverage + var rel = + query( + ("MATCH (n:{assetLabel}), (g:Geometry) WHERE n.id = $assetId AND g.id = $geometryId" + + " CREATE (n)" + + "-[r:HAS_GEOMETRY]->(g) SET r = $properties RETURN r") + .replace("{assetLabel}", getLabel(asset)), + Map.of("assetId", getId(asset), "geometryId", id, "properties", properties), + scope); + } + + @Override + protected void link( + RuntimeAsset source, + RuntimeAsset destination, + DigitalTwin.Relationship relationship, + Scope scope, + Object... additionalProperties) { + + // find out if the internal ID or what stored ID should be used + var sourceQuery = matchAsset(source, "n", "sourceId"); + var targetQuery = matchAsset(destination, "c", "targetId"); + var props = asParameters(null, additionalProperties); + var query = + ("match (n:{fromLabel}), (c:{toLabel}) WHERE {sourceQuery} AND {targetQuery} CREATE (n)" + + "-[r:{relationshipLabel}]->(c) SET r = $properties RETURN r") + .replace("{sourceQuery}", sourceQuery) + .replace("{targetQuery}", targetQuery) + .replace("{relationshipLabel}", relationship.name()) + .replace("{fromLabel}", getLabel(source)) + .replace("{toLabel}", getLabel(destination)); + + query( + query, + Map.of("sourceId", getId(source), "targetId", getId(destination), "properties", props), + scope); + } + + protected void link( + Transaction transaction, + RuntimeAsset source, + RuntimeAsset destination, + DigitalTwin.Relationship relationship, + Scope scope, + Object... 
additionalProperties) { + + // find out if the internal ID or what stored ID should be used + var sourceQuery = matchAsset(source, "n", "sourceId"); + var targetQuery = matchAsset(destination, "c", "targetId"); + var props = asParameters(null, additionalProperties); + var query = + ("MATCH (n:{fromLabel}), (c:{toLabel}) WHERE {sourceQuery} AND {targetQuery} CREATE (n)" + + "-[r:{relationshipLabel}]->(c) SET r = $properties RETURN r") + .replace("{sourceQuery}", sourceQuery) + .replace("{targetQuery}", targetQuery) + .replace("{relationshipLabel}", relationship.name()) + .replace("{fromLabel}", getLabel(source)) + .replace("{toLabel}", getLabel(destination)); + + query( + transaction, + query, + Map.of("sourceId", getId(source), "targetId", getId(destination), "properties", props), + scope); + } + + private String matchAsset(RuntimeAsset asset, String name, String queryVariable) { + + var ret = + switch (asset) { + case Activity activity -> name + ".id = $" + queryVariable; + case Observation observation -> name + ".id = $" + queryVariable; + case Actuator actuator -> name + ".id = $" + queryVariable; + case Agent agent -> name + ".name = $" + queryVariable; + default -> null; }; - if (ret == null) { - ret = switch (asset.classify()) { - case ARTIFACT, DATAFLOW, PROVENANCE -> name + ".id = $" + queryVariable; - default -> throw new KlabIllegalStateException("Unexpected value: " + asset.classify()); - }; - } - - return ret == null ? (ret = name + ".id = $" + queryVariable) : ret; + if (ret == null) { + ret = + switch (asset.classify()) { + case ARTIFACT, DATAFLOW, PROVENANCE -> name + ".id = $" + queryVariable; + default -> throw new KlabIllegalStateException("Unexpected value: " + asset.classify()); + }; } - private Object getId(RuntimeAsset asset) { + return ret == null ? (ret = name + ".id = $" + queryVariable) : ret; + } - Object ret = switch (asset) { - case ActuatorImpl actuator -> actuator.getInternalId(); - case ActivityImpl activity -> activity.getId(); - case ObservationImpl observation -> observation.getId(); - case Agent agent -> agent.getName(); - default -> null; + private Object getId(RuntimeAsset asset) { + + Object ret = + switch (asset) { + case ActuatorImpl actuator -> actuator.getInternalId(); + case ActivityImpl activity -> activity.getId(); + case ObservationImpl observation -> observation.getId(); + case Agent agent -> agent.getName(); + default -> null; }; - if (ret == null) { - // it's one of the preset ones - ret = switch (asset.classify()) { - case ARTIFACT -> scope.getId(); - case DATAFLOW -> scope.getId() + ".DATAFLOW"; - case PROVENANCE -> scope.getId() + ".PROVENANCE"; - default -> throw new KlabIllegalStateException("Unexpected value: " + asset.classify()); - }; - } - return ret; + if (ret == null) { + // it's one of the preset ones + ret = + switch (asset.classify()) { + case ARTIFACT -> scope.getId(); + case DATAFLOW -> scope.getId() + ".DATAFLOW"; + case PROVENANCE -> scope.getId() + ".PROVENANCE"; + default -> throw new KlabIllegalStateException("Unexpected value: " + asset.classify()); + }; } - - private void setId(RuntimeAsset asset, long id) { - switch (asset) { - case ObservationImpl observation -> { - observation.setId(id); - observation.setUrn(scope.getId() + "." 
+ id);
+      }
+      case ActuatorImpl actuator -> actuator.setInternalId(id);
+      case ActivityImpl activity -> activity.setId(id);
+      case AgentImpl agent -> agent.setId(id);
+      default -> {}
     }
+  }

-
-    @Override
-    protected RuntimeAsset getDataflowNode() {
-        if (scope == null) {
-            throw new KlabIllegalStateException("Access to context node in a non-contexual knowledge graph");
-        }
-        return dataflowNode;
+  @Override
+  protected RuntimeAsset getDataflowNode() {
+    if (scope == null) {
+      throw new KlabIllegalStateException(
+          "Access to context node in a non-contextual knowledge graph");
     }
-
-    @Override
-    protected RuntimeAsset getProvenanceNode() {
-        if (scope == null) {
-            throw new KlabIllegalStateException("Access to context node in a non-contexual knowledge graph");
-        }
-        return provenanceNode;
+    return dataflowNode;
+  }
+
+  @Override
+  protected RuntimeAsset getProvenanceNode() {
+    if (scope == null) {
+      throw new KlabIllegalStateException(
+          "Access to context node in a non-contextual knowledge graph");
     }
+    return provenanceNode;
+  }

-    private String encodeGeometry(Geometry observationGeometry) {
-
-        /*
-         * Ensure that the shape parameter is in WKB and any prescriptive grid parameters are resolved.
-         * TODO we should cache the geometries and scales, then reuse them.
-         */
-        var ret = Scale.create(observationGeometry).encode(ShapeImpl.wkbEncoder);
-
-        return ret;
+  private String encodeGeometry(Geometry observationGeometry) {
+    /*
+     * Ensure that the shape parameter is in WKB and any prescriptive grid parameters are resolved.
+     * TODO we should cache the geometries and scales, then reuse them.
+ */ + var ret = Scale.create(observationGeometry).encode(ShapeImpl.wkbEncoder); + + return ret; + } + + private String getLabel(Object target) { + + if (target instanceof Class cls) { + if (Observation.class.isAssignableFrom(cls)) { + return "Observation"; + } else if (Activity.class.isAssignableFrom(cls)) { + return "Activity"; + } else if (Actuator.class.isAssignableFrom(cls)) { + return "Actuator"; + } else if (Agent.class.isAssignableFrom(cls)) { + return "Agent"; + } else if (Plan.class.isAssignableFrom(cls)) { + return "Plan"; + } } - private String getLabel(Object target) { - - if (target instanceof Class cls) { - if (Observation.class.isAssignableFrom(cls)) { - return "Observation"; - } else if (Activity.class.isAssignableFrom(cls)) { - return "Activity"; - } else if (Actuator.class.isAssignableFrom(cls)) { - return "Actuator"; - } else if (Agent.class.isAssignableFrom(cls)) { - return "Agent"; - } else if (Plan.class.isAssignableFrom(cls)) { - return "Plan"; - } - } - - var ret = switch (target) { - case Observation x -> "Observation"; - case Activity x -> "Activity"; - case Actuator x -> "Actuator"; - case Agent x -> "Agent"; - case Plan x -> "Plan"; - default -> null; + var ret = + switch (target) { + case Observation x -> "Observation"; + case Activity x -> "Activity"; + case Actuator x -> "Actuator"; + case Agent x -> "Agent"; + case Plan x -> "Plan"; + default -> null; }; - if (ret == null && target instanceof RuntimeAsset runtimeAsset) { - ret = switch (runtimeAsset.classify()) { - case ARTIFACT -> "Context"; - case DATAFLOW -> "Dataflow"; - case PROVENANCE -> "Provenance"; - default -> - throw new KlabIllegalStateException("Unexpected value: " + runtimeAsset.classify()); - }; - } - - if (ret == null) { - throw new KlabIllegalArgumentException( - "Cannot store " + target.getClass().getCanonicalName() + " in knowledge graph"); - } - - return ret; + if (ret == null && target instanceof RuntimeAsset runtimeAsset) { + ret = + switch (runtimeAsset.classify()) { + case ARTIFACT -> "Context"; + case DATAFLOW -> "Dataflow"; + case PROVENANCE -> "Provenance"; + default -> + throw new KlabIllegalStateException("Unexpected value: " + runtimeAsset.classify()); + }; } - @Override - public void update(RuntimeAsset runtimeAsset, ContextScope scope, Object... parameters) { - var props = asParameters(runtimeAsset, parameters); - props.remove("id"); - var result = query(Queries.UPDATE_PROPERTIES.replace("{type}", getLabel(runtimeAsset)), - Map.of("id", (runtimeAsset instanceof ActuatorImpl actuator ? actuator.getInternalId() : - runtimeAsset.getId()), "properties", props), scope); + if (ret == null) { + throw new KlabIllegalArgumentException( + "Cannot store " + target.getClass().getCanonicalName() + " in knowledge graph"); } - @Override - protected synchronized long nextKey() { - var ret = -1L; - var lastActivity = System.currentTimeMillis(); - var result = query("MATCH (n:Statistics) return n.nextId", Map.of(), scope); - if (result != null) { - if (result.records().isEmpty()) { - ret = 1; - query("CREATE (n:Statistics {nextId: 1})", Map.of(), scope); - } else { - var id = result.records().getFirst().get(result.keys().getFirst()).asLong(); - ret = id + 1; - query("MATCH (n:Statistics) WHERE n.nextId = $id SET n.nextId = $nextId, n.lastActivity = " + - "$lastActivity", Map.of("id", id, "nextId" - , ret, "lastActivity", lastActivity), scope); - } - } - return ret; + return ret; + } + + @Override + public void update(RuntimeAsset runtimeAsset, ContextScope scope, Object... 
parameters) { + var props = asParameters(runtimeAsset, parameters); + props.remove("id"); + var result = + query( + Queries.UPDATE_PROPERTIES.replace("{type}", getLabel(runtimeAsset)), + Map.of( + "id", + (runtimeAsset instanceof ActuatorImpl actuator + ? actuator.getInternalId() + : runtimeAsset.getId()), + "properties", + props), + scope); + } + + @Override + protected synchronized long nextKey() { + var ret = -1L; + var lastActivity = System.currentTimeMillis(); + var result = query("MATCH (n:Statistics) return n.nextId", Map.of(), scope); + if (result != null) { + if (result.records().isEmpty()) { + ret = 1; + query("CREATE (n:Statistics {nextId: 1})", Map.of(), scope); + } else { + var id = result.records().getFirst().get(result.keys().getFirst()).asLong(); + ret = id + 1; + query( + "MATCH (n:Statistics) WHERE n.nextId = $id SET n.nextId = $nextId, n.lastActivity = " + + "$lastActivity", + Map.of("id", id, "nextId", ret, "lastActivity", lastActivity), + scope); + } + } + return ret; + } + + @Override + public List get( + ContextScope scope, Class resultClass, Object... queriables) { + + if (Activity.class.isAssignableFrom(resultClass)) { + return (List) getActivity(scope, queriables); + } else if (Observation.class.isAssignableFrom(resultClass)) { + return (List) getObservation(scope, queriables); + } else if (Agent.class.isAssignableFrom(resultClass)) { + return (List) getAgent(scope, queriables); + } else if (Actuator.class.isAssignableFrom(resultClass)) { + return (List) getActuator(scope, queriables); } - @Override - public List get(ContextScope scope, Class resultClass, - Object... queriables) { - - if (Activity.class.isAssignableFrom(resultClass)) { - return (List) getActivity(scope, queriables); - } else if (Observation.class.isAssignableFrom(resultClass)) { - return (List) getObservation(scope, queriables); - } else if (Agent.class.isAssignableFrom(resultClass)) { - return (List) getAgent(scope, queriables); - } else if (Actuator.class.isAssignableFrom(resultClass)) { - return (List) getActuator(scope, queriables); + // This is only in case we ask for any RuntimeAsset + Map queryParameters = new LinkedHashMap<>(); + if (queriables != null) { + for (var parameter : queriables) { + if (parameter instanceof Observable observable) { + queryParameters.put("semantics", observable.getSemantics().getUrn()); + } else if (parameter instanceof Long id) { + queryParameters.put("id", id); + } else if (parameter instanceof Observation observation) { + // define start node as the one with the observation URN + } else if (parameter instanceof Activity.Type activityType) { + if (Activity.class.isAssignableFrom(resultClass)) { + queryParameters.put("name", activityType.name()); + } } + } + } - // This is only in case we ask for any RuntimeAsset - Map queryParameters = new LinkedHashMap<>(); - if (queriables != null) { - for (var parameter : queriables) { - if (parameter instanceof Observable observable) { - queryParameters.put("semantics", observable.getSemantics().getUrn()); - } else if (parameter instanceof Long id) { - queryParameters.put("id", id); - } else if (parameter instanceof Observation observation) { - // define start node as the one with the observation URN - } else if (parameter instanceof Activity.Type activityType) { - if (Activity.class.isAssignableFrom(resultClass)) { - queryParameters.put("name", activityType.name()); - } - } - } - } + if (queryParameters.containsKey("id") && RuntimeAsset.class.isAssignableFrom(resultClass)) { + return 
List.of(retrieve(queryParameters.get("id"), resultClass, scope)); + } - if (queryParameters.containsKey("id") && RuntimeAsset.class.isAssignableFrom(resultClass)) { - return List.of(retrieve(queryParameters.get("id"), resultClass, scope)); - } + StringBuilder locator = new StringBuilder("MATCH (c:Context {id: $contextId})"); + var scopeData = ContextScope.parseScopeId(ContextScope.getScopeId(scope)); + if (scopeData.observationPath() != null) { + for (var observationId : scopeData.observationPath()) { + locator.append("-[:HAS_CHILD]->(Observation {id: ").append(observationId).append("})"); + } + } + if (scopeData.observerId() != Observation.UNASSIGNED_ID) { + // TODO needs a locator for the obs to POSTPONE to the query with reversed direction + // .....(n..)<-[:HAS_OBSERVER]-(observer:Observation {id: ...}) + } - StringBuilder locator = new StringBuilder("MATCH (c:Context {id: $contextId})"); - var scopeData = ContextScope.parseScopeId(ContextScope.getScopeId(scope)); - if (scopeData.observationPath() != null) { - for (var observationId : scopeData.observationPath()) { - locator.append("-[:HAS_CHILD]->(Observation {id: ").append(observationId).append("})"); - } - } - if (scopeData.observerId() != Observation.UNASSIGNED_ID) { - // TODO needs a locator for the obs to POSTPONE to the query with reversed direction - // .....(n..)<-[:HAS_OBSERVER]-(observer:Observation {id: ...}) + /* + * build the final query. For now the relationship is always HAS_CHILD and this only navigates + * child + * hierarchies. + */ + String label = getLabel(resultClass); + StringBuilder query = new StringBuilder(locator).append("-[:HAS_CHILD]->").append(label); + + if (!queryParameters.isEmpty()) { + query.append(" {"); + int n = 0; + for (var key : queryParameters.keySet()) { + if (n > 0) { + query.append(", "); } + query.append(key).append(": $").append(key); + n++; + } + query.append("}"); + } - /* - * build the final query. For now the relationship is always HAS_CHILD and this only navigates - * child - * hierarchies. - */ - String label = getLabel(resultClass); - StringBuilder query = new StringBuilder(locator).append("-[:HAS_CHILD]->").append(label); - - if (!queryParameters.isEmpty()) { - query.append(" {"); - int n = 0; - for (var key : queryParameters.keySet()) { - if (n > 0) { - query.append(", "); - } - query.append(key).append(": $").append(key); - n++; - } - query.append("}"); - } + queryParameters.put("contextId", scope.getId()); + var result = query(query.append(") return o").toString(), queryParameters, scope); - queryParameters.put("contextId", scope.getId()); - var result = query(query.append(") return o").toString(), queryParameters, scope); + return adapt(result, resultClass, scope); + } - return adapt(result, resultClass, scope); - } + private List getActivity(ContextScope scope, Object... queriables) { - private List getActivity(ContextScope scope, Object... 
queriables) { - - Map queryParameters = new LinkedHashMap<>(); - - Activity rootActivity = null; - if (queriables != null) { - for (var parameter : queriables) { - if (parameter instanceof Activity root) { - rootActivity = root; - } else if (parameter instanceof Long id) { - queryParameters.put("id", id); - } else if (parameter instanceof Observation observation) { - queryParameters.put("observationUrn", observation.getUrn()); - } else if (parameter instanceof Activity.Type activityType) { - queryParameters.put("type", activityType.name()); - } - } - } + Map queryParameters = new LinkedHashMap<>(); - var query = assetQuery("a", "Activity", queryParameters.keySet()); - if (rootActivity != null) { - query.append("<-[*]-(r:Activity {id: $rootActivityId})"); - queryParameters.put("rootActivityId", rootActivity.getId()); - } else { - query.append("<-[*]-(p:Provenance {id: $provenanceId})"); - queryParameters.put("provenanceId", scope.getId() + ".PROVENANCE"); + Activity rootActivity = null; + if (queriables != null) { + for (var parameter : queriables) { + if (parameter instanceof Activity root) { + rootActivity = root; + } else if (parameter instanceof Long id) { + queryParameters.put("id", id); + } else if (parameter instanceof Observation observation) { + queryParameters.put("observationUrn", observation.getUrn()); + } else if (parameter instanceof Activity.Type activityType) { + queryParameters.put("type", activityType.name()); } - - var result = query(query.append(" return a").toString(), queryParameters, scope); - return adapt(result, Activity.class, scope); + } } - private StringBuilder assetQuery(String variableName, String assetLabel, Collection keys) { - - var ret = new StringBuilder("MATCH (").append(variableName).append(":").append(assetLabel); - - if (keys.isEmpty()) { - ret.append(")"); - } else { - int n = 0; - for (String key : keys) { - ret.append(n == 0 ? " {" : ", "); - ret.append(key).append(": $").append(key); - n++; - } - ret.append("})"); - } + var query = assetQuery("a", "Activity", queryParameters.keySet()); + if (rootActivity != null) { + query.append("<-[*]-(r:Activity {id: $rootActivityId})"); + queryParameters.put("rootActivityId", rootActivity.getId()); + } else { + query.append("<-[*]-(p:Provenance {id: $provenanceId})"); + queryParameters.put("provenanceId", scope.getId() + ".PROVENANCE"); + } - return ret; + var result = query(query.append(" return a").toString(), queryParameters, scope); + return adapt(result, Activity.class, scope); + } + + private StringBuilder assetQuery( + String variableName, String assetLabel, Collection keys) { + + var ret = new StringBuilder("MATCH (").append(variableName).append(":").append(assetLabel); + + if (keys.isEmpty()) { + ret.append(")"); + } else { + int n = 0; + for (String key : keys) { + ret.append(n == 0 ? " {" : ", "); + ret.append(key).append(": $").append(key); + n++; + } + ret.append("})"); } - private List getAgent(ContextScope scope, Object... 
queriables) { - - Map queryParameters = new LinkedHashMap<>(); - var query = new StringBuilder(getScopeQuery(scope, queryParameters) + "-[:HAS_PROVENANCE]->" + - "(p:Provenance)"); - - if (queriables != null) { - for (var parameter : queriables) { - if (parameter instanceof Observable observable) { - // - } else if (parameter instanceof Activity rootActivity) { - } else if (parameter instanceof Long id) { - queryParameters.put("id", id); - query = new StringBuilder("MATCH (a:Agent {id: $id}"); - } else if (parameter instanceof Observation observation) { - // define start node as the one with the observation URN - } else if (parameter instanceof String name) { - queryParameters.put("name", name); - query = new StringBuilder("MATCH (a:Agent {name: $name}"); - } - } + return ret; + } + + private List getAgent(ContextScope scope, Object... queriables) { + + Map queryParameters = new LinkedHashMap<>(); + var query = + new StringBuilder( + getScopeQuery(scope, queryParameters) + "-[:HAS_PROVENANCE]->" + "(p:Provenance)"); + + if (queriables != null) { + for (var parameter : queriables) { + if (parameter instanceof Observable observable) { + // + } else if (parameter instanceof Activity rootActivity) { + } else if (parameter instanceof Long id) { + queryParameters.put("id", id); + query = new StringBuilder("MATCH (a:Agent {id: $id}"); + } else if (parameter instanceof Observation observation) { + // define start node as the one with the observation URN + } else if (parameter instanceof String name) { + queryParameters.put("name", name); + query = new StringBuilder("MATCH (a:Agent {name: $name}"); } - - var result = query(query.append(") return a").toString(), queryParameters, scope); - return adapt(result, Agent.class, scope); + } } - private List getObservation(ContextScope scope, Object... queriables) { - - Map queryParameters = new LinkedHashMap<>(); - var query = new StringBuilder(getScopeQuery(scope, queryParameters)); - - if (queriables != null) { - for (var parameter : queriables) { - if (parameter instanceof Observable observable) { - queryParameters.put("semantics", observable.getSemantics().getUrn()); - query.append("MATCH (o:Observation {semantics: $semantics}"); - } else if (parameter instanceof Activity rootActivity) { - } else if (parameter instanceof Long id) { - queryParameters.put("id", id); - query = new StringBuilder("MATCH (o:Observation {id: $id}"); - } else if (parameter instanceof Observation observation) { - // define start node as the one with the observation URN - } else if (parameter instanceof String urn) { - queryParameters.put("urn", urn); - } - } + var result = query(query.append(") return a").toString(), queryParameters, scope); + return adapt(result, Agent.class, scope); + } + + private List getObservation(ContextScope scope, Object... 
-    private List<Observation> getObservation(ContextScope scope, Object... queriables) {
-
-        Map<String, Object> queryParameters = new LinkedHashMap<>();
-        var query = new StringBuilder(getScopeQuery(scope, queryParameters));
-
-        if (queriables != null) {
-            for (var parameter : queriables) {
-                if (parameter instanceof Observable observable) {
-                    queryParameters.put("semantics", observable.getSemantics().getUrn());
-                    query.append("MATCH (o:Observation {semantics: $semantics}");
-                } else if (parameter instanceof Activity rootActivity) {
-                } else if (parameter instanceof Long id) {
-                    queryParameters.put("id", id);
-                    query = new StringBuilder("MATCH (o:Observation {id: $id}");
-                } else if (parameter instanceof Observation observation) {
-                    // define start node as the one with the observation URN
-                } else if (parameter instanceof String urn) {
-                    queryParameters.put("urn", urn);
-                }
-            }
-        }
-
-        var result = query(query.append(") return o").toString(), queryParameters, scope);
-        return adapt(result, Observation.class, scope);
-    }

  private List<Observation> getObservation(ContextScope scope, Object... queriables) {

    Map<String, Object> queryParameters = new LinkedHashMap<>();
    var query = new StringBuilder(getScopeQuery(scope, queryParameters));

    if (queriables != null) {
      for (var parameter : queriables) {
        if (parameter instanceof Observable observable) {
          queryParameters.put("semantics", observable.getSemantics().getUrn());
          query.append("MATCH (o:Observation {semantics: $semantics}");
        } else if (parameter instanceof Activity rootActivity) {
        } else if (parameter instanceof Long id) {
          queryParameters.put("id", id);
          query = new StringBuilder("MATCH (o:Observation {id: $id}");
        } else if (parameter instanceof Observation observation) {
          // define start node as the one with the observation URN
        } else if (parameter instanceof String urn) {
          queryParameters.put("urn", urn);
        }
      }
    }

    var result = query(query.append(") return o").toString(), queryParameters, scope);
    return adapt(result, Observation.class, scope);
  }
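  // Illustrative sketch (editor-added, not part of the patch): the statement the observable
  // branch of getObservation(...) above is expected to assemble for a plain context scope, i.e.
  // the clause from getScopeQuery(...) followed by the semantics match. The contextId and
  // semantics parameters are bound elsewhere by the caller.
  private static String observationBySemanticsSketch() {
    var cypher = new StringBuilder("MATCH (c:Context {id: $contextId})");
    cypher.append("MATCH (o:Observation {semantics: $semantics}");
    // yields "MATCH (c:Context {id: $contextId})MATCH (o:Observation {semantics: $semantics}) return o"
    return cypher.append(") return o").toString();
  }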
-    private List<Actuator> getActuator(ContextScope scope, Object... queriables) {
-        Map<String, Object> queryParameters = new LinkedHashMap<>();
-        var query = new StringBuilder(getScopeQuery(scope, queryParameters));
-
-        if (queriables != null) {
-            for (var parameter : queriables) {
-                if (parameter instanceof Observable observable) {
-                    //
-                } else if (parameter instanceof Activity rootActivity) {
-                } else if (parameter instanceof Long id) {
-                    queryParameters.put("id", id);
-                    query = new StringBuilder("MATCH (n:Actuator {id: $id})");
-                } else if (parameter instanceof Observation observation) {
-                    // define start node as the one with the observation URN
-                } else if (parameter instanceof String name) {
-                    queryParameters.put("name", name);
-                    query.append("MATCH (n:Actuator {name: $name})");
-                }
-            }
-        }
-
-        var result = query(query.append(" return n").toString(), queryParameters, scope);
-        return adapt(result, Actuator.class, scope);
-    }

  private List<Actuator> getActuator(ContextScope scope, Object... queriables) {
    Map<String, Object> queryParameters = new LinkedHashMap<>();
    var query = new StringBuilder(getScopeQuery(scope, queryParameters));

    if (queriables != null) {
      for (var parameter : queriables) {
        if (parameter instanceof Observable observable) {
          //
        } else if (parameter instanceof Activity rootActivity) {
        } else if (parameter instanceof Long id) {
          queryParameters.put("id", id);
          query = new StringBuilder("MATCH (n:Actuator {id: $id})");
        } else if (parameter instanceof Observation observation) {
          // define start node as the one with the observation URN
        } else if (parameter instanceof String name) {
          queryParameters.put("name", name);
          query.append("MATCH (n:Actuator {name: $name})");
        }
      }
    }

    var result = query(query.append(" return n").toString(), queryParameters, scope);
    return adapt(result, Actuator.class, scope);
  }

-    private String getScopeQuery(ContextScope scope, Map<String, Object> parameters) {
-
-        var scopeData = ContextScope.parseScopeId(ContextScope.getScopeId(scope));
-        var ret = new StringBuilder("MATCH (c:Context {id: $contextId})");
-        parameters.put("contextId", scopeData.scopeId());
-
-        if (scopeData.observationPath() != null) {
-            for (var observationId : scopeData.observationPath()) {
-                ret.append("-[:HAS_CHILD]->(Observation {id: ").append(observationId).append("})");
-            }
-        }
-        if (scopeData.observerId() != Observation.UNASSIGNED_ID) {
-            // TODO needs a locator for the obs to POSTPONE to the query with reversed direction
-            // .....(n..)<-[:HAS_OBSERVER]-(observer:Observation {id: ...})
-        }
-
-        return ret.toString();
-    }

  private String getScopeQuery(ContextScope scope, Map<String, Object> parameters) {

    var scopeData = ContextScope.parseScopeId(ContextScope.getScopeId(scope));
    var ret = new StringBuilder("MATCH (c:Context {id: $contextId})");
    parameters.put("contextId", scopeData.scopeId());

    if (scopeData.observationPath() != null) {
      for (var observationId : scopeData.observationPath()) {
        ret.append("-[:HAS_CHILD]->(Observation {id: ").append(observationId).append("})");
      }
    }
    if (scopeData.observerId() != Observation.UNASSIGNED_ID) {
      // TODO needs a locator for the obs to POSTPONE to the query with reversed direction
      // .....(n..)<-[:HAS_OBSERVER]-(observer:Observation {id: ...})
    }

    return ret.toString();
  }
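  // Illustrative sketch (editor-added, not part of the patch): mirrors how getScopeQuery(...)
  // above chains HAS_CHILD hops when the scope id carries an observation path; the ids 12 and 13
  // are hypothetical.
  private static String scopeQuerySketch() {
    long[] observationPath = {12L, 13L};
    var ret = new StringBuilder("MATCH (c:Context {id: $contextId})");
    for (long observationId : observationPath) {
      ret.append("-[:HAS_CHILD]->(Observation {id: ").append(observationId).append("})");
    }
    // yields "MATCH (c:Context {id: $contextId})-[:HAS_CHILD]->(Observation {id: 12})-[:HAS_CHILD]->(Observation {id: 13})"
    return ret.toString();
  }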
-    @Override
-    public Agent requireAgent(String agentName) {
-        if ("k.LAB".equals(agentName)) {
-            return klab;
-        } else if (scope.getUser().getUsername().equals(agentName)) {
-            return user;
-        } else if (agentName != null) {
-            // TODO create agent
-        }
-        return user;
-    }

  @Override
  public Agent requireAgent(String agentName) {
    if ("k.LAB".equals(agentName)) {
      return klab;
    } else if (scope.getUser().getUsername().equals(agentName)) {
      return user;
    } else if (agentName != null) {
      // TODO create agent
    }
    return user;
  }

-    @Override
-    public List<SessionInfo> getSessionInfo(Scope scope) {
-
-        var sessionIds = new LinkedHashMap<String, SessionInfo>();
-        EagerResult contexts = switch (scope) {
-            case ContextScope contextScope ->
-                    query("match(c:Context {id: $contextId}) return c", Map.of("contextId",
-                            contextScope.getId()), scope);
-            case SessionScope sessionScope ->
-                    query("match (c:Context) WHERE c.id STARTS WITH $sessionId return c", Map.of(
-                            "sessionId", sessionScope.getId() + "."), scope);
-            case UserScope userScope -> query("match(c:Context {user: $user}) return (c)", Map.of("user",
-                    userScope.getUser().getUsername()), scope);
-            default -> throw new KlabIllegalStateException("Unexpected value: " + scope);
-        };
-
-        List<ContextInfo> contextInfos = new ArrayList<>();
-        for (var context : adapt(contexts, Map.class, scope)) {
-            ContextInfo contextInfo = new ContextInfo();
-            contextInfo.setId(context.get("id").toString());
-            contextInfo.setCreationTime((Long) context.get("created"));
-            contextInfo.setName(context.get("name").toString());
-            contextInfo.setUser(context.get("user").toString());
-            contextInfos.add(contextInfo);
-        }
-
-        contextInfos.sort(new Comparator<ContextInfo>() {
-            @Override
-            public int compare(ContextInfo o1, ContextInfo o2) {
-                return Long.compare(o1.getCreationTime(), o2.getCreationTime());
-            }
-        });
-
-        // collect sessions
-        for (var context : contextInfos) {
-            var sessionId = Utils.Paths.getFirst(context.getId(), ".");
-            var sessionInfo = sessionIds.computeIfAbsent(sessionId, (s) -> {
-                var ss = new SessionInfo();
-                ss.setId(s);
-                ss.setUsername(context.getUser());
-                return ss;
-            });
-            sessionInfo.getContexts().add(context);
-        }
-
-        return new ArrayList<>(sessionIds.values());
-    }

  @Override
  public List<SessionInfo> getSessionInfo(Scope scope) {

    var sessionIds = new LinkedHashMap<String, SessionInfo>();
    EagerResult contexts =
        switch (scope) {
          case ContextScope contextScope ->
              query(
                  "match(c:Context {id: $contextId}) return c",
                  Map.of("contextId", contextScope.getId()),
                  scope);
          case SessionScope sessionScope ->
              query(
                  "match (c:Context) WHERE c.id STARTS WITH $sessionId return c",
                  Map.of("sessionId", sessionScope.getId() + "."),
                  scope);
          case UserScope userScope ->
              query(
                  "match(c:Context {user: $user}) return (c)",
                  Map.of("user", userScope.getUser().getUsername()),
                  scope);
          default -> throw new KlabIllegalStateException("Unexpected value: " + scope);
        };

    List<ContextInfo> contextInfos = new ArrayList<>();
    for (var context : adapt(contexts, Map.class, scope)) {
      ContextInfo contextInfo = new ContextInfo();
      contextInfo.setId(context.get("id").toString());
      contextInfo.setCreationTime((Long) context.get("created"));
      contextInfo.setName(context.get("name").toString());
      contextInfo.setUser(context.get("user").toString());
      contextInfos.add(contextInfo);
    }

    contextInfos.sort(
        new Comparator<ContextInfo>() {
          @Override
          public int compare(ContextInfo o1, ContextInfo o2) {
            return Long.compare(o1.getCreationTime(), o2.getCreationTime());
          }
        });

    // collect sessions
    for (var context : contextInfos) {
      var sessionId = Utils.Paths.getFirst(context.getId(), ".");
      var sessionInfo =
          sessionIds.computeIfAbsent(
              sessionId,
              (s) -> {
                var ss = new SessionInfo();
                ss.setId(s);
                ss.setUsername(context.getUser());
                return ss;
              });
      sessionInfo.getContexts().add(context);
    }

    return new ArrayList<>(sessionIds.values());
  }
}
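// Illustrative sketch (editor-added, not part of the patch): mirrors how getSessionInfo(...)
// above groups context records under their session via the first dot-separated segment of the
// context id; Utils.Paths.getFirst(id, ".") in the method plays the role of the substring call
// here, and the ids below are made up.
class SessionGroupingSketch {
  public static void main(String[] args) {
    String[] contextIds = {"s1.c1", "s1.c2", "s2.c1"};
    var sessions = new java.util.LinkedHashMap<String, java.util.List<String>>();
    for (String contextId : contextIds) {
      var sessionId = contextId.substring(0, contextId.indexOf('.'));
      sessions.computeIfAbsent(sessionId, s -> new java.util.ArrayList<>()).add(contextId);
    }
    // prints {s1=[s1.c1, s1.c2], s2=[s2.c1]}
    System.out.println(sessions);
  }
}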