diff --git a/.github/dependabot.yml b/.github/dependabot.yml index cf5ca1f334..66ae07e78b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -12,11 +12,22 @@ updates: - johanneshiry - t-ober - sensarmad + - sebastian-peter + - danielfeismann ignore: - dependency-name: org.spockframework:spock-core versions: - - 2.1-groovy-3.0-SNAPSHOT - 2.1-groovy-2.5-SNAPSHOT + - 2.1-groovy-3.0-SNAPSHOT + - 2.2-groovy-4.0-SNAPSHOT + - 2.2-groovy-2.5-SNAPSHOT + - 2.2-groovy-3.0-SNAPSHOT + - 2.2-M1-groovy-2.5 + - 2.2-M1-groovy-3.0 + - 2.2-M1-groovy-4.0 + - 2.2-M2-groovy-2.5 + - 2.2-M2-groovy-3.0 + - 2.2-M2-groovy-4.0 - dependency-name: org.scalatest:scalatest_2.13 versions: - 3.3.0-SNAP+ diff --git a/CHANGELOG.md b/CHANGELOG.md index fe547e8969..7b3caec0ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,14 +5,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] + +### Added +- Implement SQL source for primary data [#34](https://github.com/ie3-institute/simona/issues/34), [#101](https://github.com/ie3-institute/simona/issues/101) + ### Changed - Re-organizing test resources into their respective packages [#105](https://github.com/ie3-institute/simona/issues/105) - BREAKING: Using snapshot version of PSDM - Simplified PrimaryServiceProxy due to changes in PSDM [#120](https://github.com/ie3-institute/simona/issues/120) +- Improved handling of weights and their sum in determination of weather data [#173](https://github.com/ie3-institute/simona/issues/173) - Improving code readability in EvcsAgent by moving FreeLotsRequest to separate methods [#19](https://github.com/ie3-institute/simona/issues/19) - Sending termination message to external simulation on expected and unexpected shutdowns of SIMONA [#35](https://github.com/ie3-institute/simona/issues/35) ### Fixed - Location of `vn_simona` test grid (was partially in Berlin and Dortmund) +- Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation) +- Fix default resolution of weather source wrapper [#78](https://github.com/ie3-institute/simona/issues/78) [Unreleased]: https://github.com/ie3-institute/simona/compare/a14a093239f58fca9b2b974712686b33e5e5f939...HEAD diff --git a/build.gradle b/build.gradle index ef0c3d0ff0..0a739102af 100644 --- a/build.gradle +++ b/build.gradle @@ -7,10 +7,10 @@ plugins { id 'signing' id 'maven-publish' // publish to a maven repo (local or mvn central, has to be defined) id 'pmd' // code check, working on source code - id 'com.diffplug.spotless' version '6.2.2'// code format + id 'com.diffplug.spotless' version '6.4.2'// code format id 'com.github.onslip.gradle-one-jar' version '1.0.6' // pack a self contained jar id "com.github.ben-manes.versions" version '0.42.0' - id "de.undercouch.download" version "5.0.1" // downloads plugin + id "de.undercouch.download" version "5.0.4" // downloads plugin id "kr.motd.sphinx" version "2.10.1" // documentation generation id "com.github.johnrengelman.shadow" version "7.1.2" // fat jar id "org.sonarqube" version "3.3" // sonarqube @@ -26,10 +26,12 @@ ext { scalaVersion = '2.13' scalaBinaryVersion = '2.13.8' - akkaVersion = '2.6.18' + akkaVersion = '2.6.19' tscfgVersion = '0.9.997' scapegoatVersion = '1.4.12' + testContainerVersion = '0.40.5' + scriptsLocation = 'gradle' + File.separator + 'scripts' + File.separator // location of script plugins } @@ -96,17 +98,21 @@ dependencies { /* logging */ 
implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:3.9.4" // akka scala logging - implementation "ch.qos.logback:logback-classic:1.2.10" + implementation "ch.qos.logback:logback-classic:1.2.11" /* testing */ - testImplementation 'org.spockframework:spock-core:2.1-M2-groovy-3.0' + testImplementation 'org.spockframework:spock-core:2.1-groovy-3.0' testImplementation 'org.scalatestplus:mockito-3-4_2.13:3.2.10.0' - implementation 'org.mockito:mockito-core:4.3.1' // mocking framework + implementation 'org.mockito:mockito-core:4.4.0' // mocking framework testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.11" testRuntimeClasspath 'com.vladsch.flexmark:flexmark-all:0.64.0' testImplementation group: 'org.pegdown', name: 'pegdown', version: '1.6.0' testImplementation "com.typesafe.akka:akka-testkit_${scalaVersion}:${akkaVersion}" // akka testkit + // testcontainers + testImplementation "com.dimafeng:testcontainers-scala-scalatest_${scalaVersion}:${testContainerVersion}" + testImplementation "com.dimafeng:testcontainers-scala-postgresql_${scalaVersion}:${testContainerVersion}" + /* --- Scala libs --- */ /* CORE Scala */ implementation "org.scala-lang:scala-library:${scalaBinaryVersion}" @@ -136,13 +142,13 @@ dependencies { scalaCompilerPlugin "com.sksamuel.scapegoat:scalac-scapegoat-plugin_${scalaBinaryVersion}:${scapegoatVersion}" implementation 'org.apache.commons:commons-math3:3.6.1' // apache commons math3 - implementation 'org.apache.poi:poi-ooxml:5.2.0' // used for FilenameUtils + implementation 'org.apache.poi:poi-ooxml:5.2.2' // used for FilenameUtils implementation 'javax.measure:unit-api:2.1.3' - implementation 'tech.units:indriya:2.1.2' // quantities + implementation 'tech.units:indriya:2.1.3' // quantities implementation 'org.apache.commons:commons-csv:1.9.0' implementation 'org.scalanlp:breeze_2.13:1.3' // scientific calculations (http://www.scalanlp.org/) implementation 'de.lmu.ifi.dbs.elki:elki:0.7.5' // Statistics (for random load model) - implementation 'com.google.guava:guava:31.0.1-jre' // Building threads + implementation 'com.google.guava:guava:31.1-jre' // Building threads implementation 'org.jgrapht:jgrapht-core:1.5.1' } diff --git a/docs/readthedocs/_static/figures/uml/InitializationPhase.png b/docs/readthedocs/_static/figures/uml/InitializationPhase.png new file mode 100644 index 0000000000..2e82ab5e49 Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/InitializationPhase.png differ diff --git a/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByItself.png b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByItself.png new file mode 100644 index 0000000000..2d275dbeca Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByItself.png differ diff --git a/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByPrimaryData.png b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByPrimaryData.png new file mode 100644 index 0000000000..bfb5a6af97 Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredByPrimaryData.png differ diff --git a/docs/readthedocs/_static/figures/uml/ParticipantTriggeredBySecondaryData.png b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredBySecondaryData.png new file mode 100644 index 0000000000..57ac769a12 Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/ParticipantTriggeredBySecondaryData.png differ diff --git a/docs/readthedocs/usersguide.rst b/docs/readthedocs/usersguide.rst index 
42d8a8034a..942970611d 100644 --- a/docs/readthedocs/usersguide.rst +++ b/docs/readthedocs/usersguide.rst @@ -17,11 +17,11 @@ To run and customize the project you need a Java Development Kit (JDK) installat Installation ============ -You can find and download the source code of the latest stable SIMONA version `here `_. Go ahead and clone the repository using git: +You can find and download the source code of the latest stable SIMONA version `here `_. Go ahead and clone the repository using git: .. code-block:: none - $ git clone https://git.ie3.e-technik.tu-dortmund.de/SIMONACrew/SIMONA.git + $ git clone https://github.com/ie3-institute/simona.git Running a Standalone Simulation @@ -171,7 +171,7 @@ SIMONA is capable of running an external sub-simulation by integration within th The information flow between SIMONA and the external simulation is partitioned into a control stream (see ``edu.ie3.simona.api.ExtSimAdapter``) and a number of optional data streams. Currently, only a data stream transporting electric vehicle movement information is implemented (see ``edu.ie3.simona.service.ev.ExtEvDataService``). -An external simulation has to depend on `SimonaAPI `_ and make use of some of its interfaces (see below). +An external simulation has to depend on `SimonaAPI `_ and make use of some of its interfaces (see below). In order to run an external simulation, several requirements have to be fulfilled and a bunch of preparation steps have to be followed. .. note:: @@ -201,3 +201,25 @@ These steps have to be performed each time updates to the external simulation ne - Copy the resulting *jar* (usually placed inside /build/libs) to ``./input/ext_sim/``. Now, when a simulation with SIMONA is started (see `above <#running-a-standalone-simulation>`_), the external simulation is triggered at each tick that it requested. + +Troubleshooting +=============== + +My power flow calculation isn't converging - why is that? +--------------------------------------------------------- + +When your power flow is not converging, it means that the load situation in the grid during the time of the power flow calculation is not physically feasible. + +This usually has one of the following two reasons: + +#. + There is more load in the grid than it can physically handle. + +#. + There is more generation in the grid than it can physically handle. + +One of the main reasons is a misconfiguration of the grid and its assets. +Assess the power of the load and generation units and check if the values make sense. +Keep in mind the metric prefixes that are assumed for the models, which are listed in the `PSDM docs `_. +If everything seems to be configured correctly, it could also be the case that the grid itself (e.g. its lines and transformers) is configured incorrectly. +Do a similar sanity check for the grid's assets.
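The troubleshooting advice above boils down to comparing installed load against installed generation. A quick way to do that outside of SIMONA is to sum up the `s_rated` column of the participant input files. The sketch below is illustrative only: it assumes the comma-separated layout of the `vn_simona` sample grid shown in this diff (header row, `s_rated` as last column, kVA per PSDM defaults) and a `load_input.csv` file that is not part of this diff.

```scala
import scala.io.Source

object RatedPowerCheck {

  /** Sums up the s_rated column (assumed to be the last column, in kVA) of a
    * PSDM-style participant input file.
    */
  def totalSRated(csvPath: String): Double = {
    val source = Source.fromFile(csvPath)
    try {
      val lines = source.getLines().toVector
      require(
        lines.head.endsWith("\"s_rated\""),
        s"$csvPath: expected s_rated as last column"
      )
      lines.tail.map(_.split(",").last.toDouble).sum
    } finally source.close()
  }

  def main(args: Array[String]): Unit = {
    val gridDir = "input/samples/vn_simona/fullGrid"
    val pv = totalSRated(s"$gridDir/pv_input.csv")
    val load = totalSRated(s"$gridDir/load_input.csv") // file name assumed, not shown in this diff
    println(f"Installed PV s_rated: $pv%.1f kVA, installed load s_rated: $load%.1f kVA")
  }
}
```

If one of the two sums dwarfs the other by orders of magnitude, a missing or wrong metric prefix is a likely culprit.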
diff --git a/docs/uml/main/ParticipantModelling.puml b/docs/uml/main/ParticipantModelling.puml index 2d1d1ec79c..ec44203170 100644 --- a/docs/uml/main/ParticipantModelling.puml +++ b/docs/uml/main/ParticipantModelling.puml @@ -41,9 +41,9 @@ package edu.ie3.edu.ie3.simona { } DateTime --|> SecondaryData - Class Weather{ - + diffRad: Quantity[Irradiation] - + dirRad: Quantity[Irradiation] + Class WeatherData{ + + diffIrr: Quantity[Irradiation] + + dirIrr: Quantity[Irradiation] + temp: Quantity[Temperature] + windVel: Quantity[Speed] } diff --git a/gradle/scripts/tscfg.gradle b/gradle/scripts/tscfg.gradle index 7f430ff1f7..ce1fe4dbe9 100644 --- a/gradle/scripts/tscfg.gradle +++ b/gradle/scripts/tscfg.gradle @@ -15,7 +15,7 @@ task genConfigClass { args = [ "build/tscfg-${tscfgVersion}.jar", "--spec", - "src/main/resources/config/simona-config-template.conf", + "src/main/resources/config/config-template.conf", "--scala", "--durations", "--pn", diff --git a/input/samples/vn_146_lv_small/fullGrid/transformer_2_w_type_input.csv b/input/samples/vn_146_lv_small/fullGrid/transformer_2_w_type_input.csv index f0c3702e2f..987777ff65 100644 --- a/input/samples/vn_146_lv_small/fullGrid/transformer_2_w_type_input.csv +++ b/input/samples/vn_146_lv_small/fullGrid/transformer_2_w_type_input.csv @@ -1,6 +1,6 @@ "uuid","b_m","d_phi","d_v","g_m","id","r_sc","s_rated","tap_max","tap_min","tap_neutr","tap_side","v_rated_a","v_rated_b","x_sc" 14b1798a-6903-49d6-8578-ad2a7d399341,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 -97735722-05cc-4ca8-8a8d-c08ac3ded19a,1.27,0.0,1.5,555.5,HöS-HS_1,5.415,200000.0,5,-5,0,false,380.0,110.0,108.165 +97735722-05cc-4ca8-8a8d-c08ac3ded19a,-1.27,0.0,1.5,555.5,HöS-HS_1,5.415,200000.0,5,-5,0,false,380.0,110.0,108.165 f88989c7-9812-4b3e-9bc0-3df29f1e5ae1,0.0,0.0,0.5,0.0,MS-NS_1,10.078,630.0,10,-10,0,false,20.0,0.4,23.312 cf7b1102-8dbd-4da2-a469-90800b3394b6,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 1214c366-826e-4aeb-88f5-af8f40acaa04,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 diff --git a/input/samples/vn_simona/fullGrid/pv_input.csv b/input/samples/vn_simona/fullGrid/pv_input.csv index 011dde4eca..a7c99726d2 100644 --- a/input/samples/vn_simona/fullGrid/pv_input.csv +++ b/input/samples/vn_simona/fullGrid/pv_input.csv @@ -1,4 +1,4 @@ -"uuid","albedo","azimuth","cos_phi_rated","eta_conv","height","id","k_g","k_t","market_reaction","node","operates_from","operates_until","operator","q_characteristics","s_rated" +"uuid","albedo","azimuth","cos_phi_rated","eta_conv","elevation_angle","id","k_g","k_t","market_reaction","node","operates_from","operates_until","operator","q_characteristics","s_rated" 5b38af42-1ee4-4a41-b666-ea141187df37,0.20000000298023224,-11.463644027709961,0.8999999761581421,96.0,33.62879943847656,NS_NET146_F2_(3)_PV,0.8999999761581421,1.0,false,0170837a-1876-45f9-a613-666f9991964d,,,,cosPhiFixed:{(0.00,0.90)},10.0 e447506e-3d43-4bce-8aab-a7ca8b7fbc45,0.20000000298023224,3.8914573192596436,0.8999999761581421,98.0,42.77021408081055,NS_NET146_F4_(9)_PV,0.8999999761581421,1.0,false,9b889b73-c108-4b38-b6eb-3377841e0c83,,,,cosPhiFixed:{(0.00,0.90)},10.0 6cac0624-6336-4418-bcf0-990abcdb824b,0.20000000298023224,-8.097375869750977,0.8999999761581421,98.0,44.90728759765625,NS_NET146_F4_(16)_PV,0.8999999761581421,1.0,false,9f7599de-c488-46c5-b053-1279a511f7b9,,,,cosPhiFixed:{(0.00,0.90)},30.0 diff --git a/input/samples/vn_simona/fullGrid/transformer_2_w_type_input.csv 
b/input/samples/vn_simona/fullGrid/transformer_2_w_type_input.csv index f0c3702e2f..987777ff65 100644 --- a/input/samples/vn_simona/fullGrid/transformer_2_w_type_input.csv +++ b/input/samples/vn_simona/fullGrid/transformer_2_w_type_input.csv @@ -1,6 +1,6 @@ "uuid","b_m","d_phi","d_v","g_m","id","r_sc","s_rated","tap_max","tap_min","tap_neutr","tap_side","v_rated_a","v_rated_b","x_sc" 14b1798a-6903-49d6-8578-ad2a7d399341,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 -97735722-05cc-4ca8-8a8d-c08ac3ded19a,1.27,0.0,1.5,555.5,HöS-HS_1,5.415,200000.0,5,-5,0,false,380.0,110.0,108.165 +97735722-05cc-4ca8-8a8d-c08ac3ded19a,-1.27,0.0,1.5,555.5,HöS-HS_1,5.415,200000.0,5,-5,0,false,380.0,110.0,108.165 f88989c7-9812-4b3e-9bc0-3df29f1e5ae1,0.0,0.0,0.5,0.0,MS-NS_1,10.078,630.0,10,-10,0,false,20.0,0.4,23.312 cf7b1102-8dbd-4da2-a469-90800b3394b6,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 1214c366-826e-4aeb-88f5-af8f40acaa04,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 diff --git a/src/main/resources/config/config-template.conf b/src/main/resources/config/config-template.conf index 8e6952e8f0..086469b577 100644 --- a/src/main/resources/config/config-template.conf +++ b/src/main/resources/config/config-template.conf @@ -101,9 +101,7 @@ simona.input.primary = { jdbcUrl: string userName: string password: string - weatherTableName: string schemaName: string | "public" - timeColumnName: string timePattern: string | "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'" # default pattern from PSDM:TimeBasedSimpleValueFactory } #@optional @@ -150,9 +148,8 @@ simona.input.weather.datasource = { jdbcUrl: string userName: string password: string - weatherTableName: string + tableName: string schemaName: string | "public" - timeColumnName: string } #@optional couchbaseParams = { diff --git a/src/main/scala/edu/ie3/simona/agent/participant/ParticipantAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/ParticipantAgentFundamentals.scala index 6fd0577072..ea91071b7f 100644 --- a/src/main/scala/edu/ie3/simona/agent/participant/ParticipantAgentFundamentals.scala +++ b/src/main/scala/edu/ie3/simona/agent/participant/ParticipantAgentFundamentals.scala @@ -6,7 +6,7 @@ package edu.ie3.simona.agent.participant -import akka.actor.{ActorRef, FSM} +import akka.actor.{ActorRef, FSM, PoisonPill} import akka.event.LoggingAdapter import akka.util import akka.util.Timeout @@ -452,6 +452,7 @@ protected trait ParticipantAgentFundamentals[ ) } case RegistrationResponseMessage.RegistrationFailedMessage => + self ! PoisonPill throw new ActorNotRegisteredException( s"Registration of actor $actorName for ${sender()} failed." 
) diff --git a/src/main/scala/edu/ie3/simona/agent/participant/pv/PVAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/pv/PVAgentFundamentals.scala index 16547f8693..f42d8aa8c9 100644 --- a/src/main/scala/edu/ie3/simona/agent/participant/pv/PVAgentFundamentals.scala +++ b/src/main/scala/edu/ie3/simona/agent/participant/pv/PVAgentFundamentals.scala @@ -230,8 +230,8 @@ protected trait PVAgentFundamentals PVRelevantData( dateTime, tickInterval, - weatherData.diffRad, - weatherData.dirRad + weatherData.diffIrr, + weatherData.dirIrr ) val power = pvModel.calculatePower( diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala index 6283055bbc..e8fa1c018c 100644 --- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala +++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala @@ -1,5 +1,5 @@ /* - * © 2021. TU Dortmund University, + * © 2022. TU Dortmund University, * Institute of Energy Systems, Energy Efficiency and Energy Economics, * Research group Distribution grid planning and operation */ @@ -908,10 +908,8 @@ object SimonaConfig { jdbcUrl: java.lang.String, password: java.lang.String, schemaName: java.lang.String, - timeColumnName: java.lang.String, timePattern: java.lang.String, - userName: java.lang.String, - weatherTableName: java.lang.String + userName: java.lang.String ) object SqlParams { def apply( @@ -925,14 +923,10 @@ object SimonaConfig { schemaName = if (c.hasPathOrNull("schemaName")) c.getString("schemaName") else "public", - timeColumnName = - $_reqStr(parentPath, c, "timeColumnName", $tsCfgValidator), timePattern = if (c.hasPathOrNull("timePattern")) c.getString("timePattern") else "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'", - userName = $_reqStr(parentPath, c, "userName", $tsCfgValidator), - weatherTableName = - $_reqStr(parentPath, c, "weatherTableName", $tsCfgValidator) + userName = $_reqStr(parentPath, c, "userName", $tsCfgValidator) ) } private def $_reqStr( @@ -1277,9 +1271,8 @@ object SimonaConfig { jdbcUrl: java.lang.String, password: java.lang.String, schemaName: java.lang.String, - timeColumnName: java.lang.String, - userName: java.lang.String, - weatherTableName: java.lang.String + tableName: java.lang.String, + userName: java.lang.String ) object SqlParams { def apply( @@ -1293,11 +1286,9 @@ object SimonaConfig { schemaName = if (c.hasPathOrNull("schemaName")) c.getString("schemaName") else "public", - timeColumnName = - $_reqStr(parentPath, c, "timeColumnName", $tsCfgValidator), - userName = $_reqStr(parentPath, c, "userName", $tsCfgValidator), - weatherTableName = - $_reqStr(parentPath, c, "weatherTableName", $tsCfgValidator) + tableName = + $_reqStr(parentPath, c, "tableName", $tsCfgValidator), + userName = $_reqStr(parentPath, c, "userName", $tsCfgValidator) ) } private def $_reqStr( diff --git a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala index 7c7b14013d..84feebd2e2 100644 --- a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala +++ b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala @@ -6,7 +6,8 @@ package edu.ie3.simona.event.listener -import akka.actor.{ActorRef, FSM, PoisonPill, Props, Stash} +import akka.actor.{ActorRef, FSM, Props, Stash, Status} +import akka.pattern.pipe import akka.stream.Materializer import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor import edu.ie3.datamodel.models.result.ResultEntity 
@@ -23,6 +24,7 @@ import edu.ie3.simona.event.listener.ResultEventListener.{ BaseData, Init, ResultEventListenerData, + SinkResponse, Transformer3wKey, UninitializedData } @@ -59,6 +61,10 @@ object ResultEventListener extends Transformer3wResultSupport { private final case object Init + private final case class SinkResponse( + response: Map[Class[_], ResultEntitySink] + ) + /** [[ResultEventListener]] base data containing all information the listener * needs * @@ -106,29 +112,33 @@ object ResultEventListener extends Transformer3wResultSupport { case _: ResultSinkType.Csv => eventClassesToConsider .map(resultClass => { - val fileName = - resultFileHierarchy.rawOutputDataFilePaths.getOrElse( - resultClass, - throw new FileHierarchyException( - s"Unable to get file path for result class '${resultClass.getSimpleName}' from output file hierarchy! " + - s"Available file result file paths: ${resultFileHierarchy.rawOutputDataFilePaths}" - ) - ) - if (fileName.endsWith(".csv") || fileName.endsWith(".csv.gz")) { - val sink = - ResultEntityCsvSink( - fileName.replace(".gz", ""), - new ResultEntityProcessor(resultClass), - fileName.endsWith(".gz") + resultFileHierarchy.rawOutputDataFilePaths + .get(resultClass) + .map(Future.successful) + .getOrElse( + Future.failed( + new FileHierarchyException( + s"Unable to get file path for result class '${resultClass.getSimpleName}' from output file hierarchy! " + + s"Available file result file paths: ${resultFileHierarchy.rawOutputDataFilePaths}" + ) ) - sink.map((resultClass, _)) - } else { - throw new ProcessResultEventException( - s"Invalid output file format for file $fileName provided. Currently only '.csv' or '.csv.gz' is supported!" ) - } + .flatMap { fileName => + if (fileName.endsWith(".csv") || fileName.endsWith(".csv.gz")) { + ResultEntityCsvSink( + fileName.replace(".gz", ""), + new ResultEntityProcessor(resultClass), + fileName.endsWith(".gz") + ).map((resultClass, _)) + } else { + Future( + throw new ProcessResultEventException( + s"Invalid output file format for file $fileName provided. Currently only '.csv' or '.csv.gz' is supported!" + ) + ) + } + } }) - case ResultSinkType.InfluxDb1x(url, database, scenario) => // creates one connection per result entity that should be processed eventClassesToConsider @@ -287,10 +297,6 @@ class ResultEventListener( stash() stay() - case Event(baseData: BaseData, UninitializedData) => - unstashAll() - goto(Idle) using baseData - case Event(Init, _) => Future .sequence( @@ -299,18 +305,20 @@ class ResultEventListener( resultFileHierarchy ) ) - .onComplete { - case Failure(exception) => - throw new InitializationException( - "Cannot initialize result sinks!" - ).initCause(exception) - self ! PoisonPill - case Success(classToSink) => - log.debug("Initialization complete!") - supervisor ! ServiceInitComplete - self ! BaseData(classToSink.toMap) - } + .map(result => SinkResponse(result.toMap)) + .pipeTo(self) stay() + + case Event(SinkResponse(classToSink), _) => + // Sink Initialization succeeded + log.debug("Initialization complete!") + supervisor ! 
ServiceInitComplete + + unstashAll() + goto(Idle) using BaseData(classToSink) + + case Event(Status.Failure(ex), _) => + throw new InitializationException("Unable to setup SimonaSim.", ex) } when(Idle) { diff --git a/src/main/scala/edu/ie3/simona/model/grid/Transformer3wModel.scala b/src/main/scala/edu/ie3/simona/model/grid/Transformer3wModel.scala index cb54dde44e..c9bfa8c95c 100644 --- a/src/main/scala/edu/ie3/simona/model/grid/Transformer3wModel.scala +++ b/src/main/scala/edu/ie3/simona/model/grid/Transformer3wModel.scala @@ -327,7 +327,7 @@ case object Transformer3wModel { transformerRefSystem.rInPu(transformerType.getrScA), transformerRefSystem.xInPu(transformerType.getxScA), transformerRefSystem.gInPu(transformerType.getgM), - transformerRefSystem.gInPu(transformerType.getbM).multiply(-1) + transformerRefSystem.gInPu(transformerType.getbM) ) case PowerFlowCaseB => ( diff --git a/src/main/scala/edu/ie3/simona/model/grid/TransformerModel.scala b/src/main/scala/edu/ie3/simona/model/grid/TransformerModel.scala index 63074a3138..18f573547f 100644 --- a/src/main/scala/edu/ie3/simona/model/grid/TransformerModel.scala +++ b/src/main/scala/edu/ie3/simona/model/grid/TransformerModel.scala @@ -147,7 +147,7 @@ case object TransformerModel { trafoType.getrSc.divide(squaredNominalVoltRatio), trafoType.getxSc.divide(squaredNominalVoltRatio), trafoType.getgM.multiply(squaredNominalVoltRatio), - trafoType.getbM.multiply(-1).multiply(squaredNominalVoltRatio) + trafoType.getbM.multiply(squaredNominalVoltRatio) ) /* Transfer the dimensionless parameters into the grid reference system */ diff --git a/src/main/scala/edu/ie3/simona/model/participant/PVModel.scala b/src/main/scala/edu/ie3/simona/model/participant/PVModel.scala index 235f8a3981..aa3f746cc7 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/PVModel.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/PVModel.scala @@ -817,7 +817,7 @@ case object PVModel { inputModel.getAlbedo, inputModel.getEtaConv, inputModel.getAzimuth, - inputModel.getHeight + inputModel.getElevationAngle ) model.enable() diff --git a/src/main/scala/edu/ie3/simona/ontology/messages/services/WeatherMessage.scala b/src/main/scala/edu/ie3/simona/ontology/messages/services/WeatherMessage.scala index 140def4563..27a9e88e7f 100644 --- a/src/main/scala/edu/ie3/simona/ontology/messages/services/WeatherMessage.scala +++ b/src/main/scala/edu/ie3/simona/ontology/messages/services/WeatherMessage.scala @@ -56,11 +56,21 @@ object WeatherMessage { ) extends WeatherMessage with ProvisionMessage[WeatherData] - /** Hold entire weather result together + /** Container class for the entirety of weather information at a certain point + * in time and at a certain coordinate + * + * @param diffIrr + * Diffuse irradiance on the horizontal pane + * @param dirIrr + * Direct irradiance on the horizontal pane + * @param temp + * Temperature + * @param windVel + * Wind velocity */ final case class WeatherData( - diffRad: ComparableQuantity[Irradiance], - dirRad: ComparableQuantity[Irradiance], + diffIrr: ComparableQuantity[Irradiance], + dirIrr: ComparableQuantity[Irradiance], temp: ComparableQuantity[Temperature], windVel: ComparableQuantity[Speed] ) extends SecondaryData diff --git a/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceProxy.scala b/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceProxy.scala index 27c9808db9..de9c367f45 100644 --- a/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceProxy.scala +++ 
b/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceProxy.scala @@ -7,14 +7,32 @@ package edu.ie3.simona.service.primary import akka.actor.{Actor, ActorRef, PoisonPill, Props} -import edu.ie3.datamodel.io.connectors.CsvFileConnector.CsvIndividualTimeSeriesMetaInformation -import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme -import edu.ie3.datamodel.io.naming.FileNamingStrategy -import edu.ie3.datamodel.io.source.TimeSeriesMappingSource -import edu.ie3.datamodel.io.source.csv.CsvTimeSeriesMappingSource +import edu.ie3.datamodel.io.connectors.SqlConnector +import edu.ie3.datamodel.io.naming.{ + DatabaseNamingStrategy, + EntityPersistenceNamingStrategy, + FileNamingStrategy +} +import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation +import edu.ie3.datamodel.io.source.{ + TimeSeriesMappingSource, + TimeSeriesTypeSource +} +import edu.ie3.datamodel.io.source.csv.{ + CsvTimeSeriesMappingSource, + CsvTimeSeriesTypeSource +} +import edu.ie3.datamodel.io.source.sql.{ + SqlTimeSeriesMappingSource, + SqlTimeSeriesTypeSource +} import edu.ie3.datamodel.models.value.Value import edu.ie3.simona.config.SimonaConfig -import edu.ie3.simona.config.SimonaConfig.Simona.Input.Primary.CsvParams +import edu.ie3.simona.config.SimonaConfig.Simona.Input.Primary.{ + CsvParams, + SqlParams +} import edu.ie3.simona.config.SimonaConfig.Simona.Input.{ Primary => PrimaryConfig } @@ -43,7 +61,8 @@ import edu.ie3.simona.service.primary.PrimaryServiceProxy.{ } import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ CsvInitPrimaryServiceStateData, - InitPrimaryServiceStateData + InitPrimaryServiceStateData, + SqlInitPrimaryServiceStateData } import java.text.SimpleDateFormat @@ -51,7 +70,6 @@ import java.time.ZonedDateTime import java.util.UUID import scala.Option.when import scala.jdk.CollectionConverters._ -import scala.jdk.OptionConverters._ import scala.util.{Failure, Success, Try} /** This actor has information on which models can be replaced by precalculated @@ -128,36 +146,26 @@ case class PrimaryServiceProxy( private def prepareStateData( primaryConfig: PrimaryConfig, simulationStart: ZonedDateTime - ): Try[PrimaryServiceStateData] = - Seq( - primaryConfig.sqlParams, - primaryConfig.influxDb1xParams, - primaryConfig.csvParams, - primaryConfig.couchbaseParams - ).filter(_.isDefined).flatten.headOption match { - case Some(CsvParams(csvSep, folderPath, _)) => - // TODO: Configurable file naming strategy - val mappingSource = new CsvTimeSeriesMappingSource( - csvSep, - folderPath, - new FileNamingStrategy() - ) + ): Try[PrimaryServiceStateData] = { + createSources(primaryConfig).map { + case (mappingSource, metaInformationSource) => val modelToTimeSeries = mappingSource.getMapping.asScala.toMap + val timeSeriesMetaInformation = + metaInformationSource.getTimeSeriesMetaInformation.asScala.toMap + val timeSeriesToSourceRef = modelToTimeSeries.values .to(LazyList) .distinct .flatMap { timeSeriesUuid => - mappingSource - .getTimeSeriesMetaInformation(timeSeriesUuid) - .toScala match { + timeSeriesMetaInformation + .get(timeSeriesUuid) match { case Some(metaInformation) => - val columnScheme = metaInformation.getColumnScheme /* Only register those entries, that meet the supported column schemes */ when( PrimaryServiceWorker.supportedColumnSchemes - .contains(columnScheme) + .contains(metaInformation.getColumnScheme) ) { - timeSeriesUuid -> SourceRef(columnScheme, None) + timeSeriesUuid -> 
SourceRef(metaInformation, None) } case None => log.warning( @@ -168,16 +176,57 @@ case class PrimaryServiceProxy( } } .toMap + PrimaryServiceStateData( + modelToTimeSeries, + timeSeriesToSourceRef, + simulationStart, + primaryConfig, + mappingSource + ) + } + } + + private def createSources( + primaryConfig: PrimaryConfig + ): Try[(TimeSeriesMappingSource, TimeSeriesTypeSource)] = { + Seq( + primaryConfig.sqlParams, + primaryConfig.influxDb1xParams, + primaryConfig.csvParams, + primaryConfig.couchbaseParams + ).filter(_.isDefined).flatten.headOption match { + case Some(CsvParams(csvSep, folderPath, _)) => + val fileNamingStrategy = new FileNamingStrategy() Success( - PrimaryServiceStateData( - modelToTimeSeries, - timeSeriesToSourceRef, - simulationStart, - primaryConfig, - mappingSource + new CsvTimeSeriesMappingSource( + csvSep, + folderPath, + fileNamingStrategy + ), + new CsvTimeSeriesTypeSource( + csvSep, + folderPath, + fileNamingStrategy + ) + ) + case Some(sqlParams: SqlParams) => + val sqlConnector = new SqlConnector( + sqlParams.jdbcUrl, + sqlParams.userName, + sqlParams.password + ) + Success( + new SqlTimeSeriesMappingSource( + sqlConnector, + sqlParams.schemaName, + new EntityPersistenceNamingStrategy() + ), + new SqlTimeSeriesTypeSource( + sqlConnector, + sqlParams.schemaName, + new DatabaseNamingStrategy() ) ) - case Some(x) => Failure( new IllegalArgumentException( @@ -191,6 +240,7 @@ case class PrimaryServiceProxy( ) ) } + } /** Message handling, if the actor has been initialized already. This method * basically handles registration requests, checks, if pre-calculated, @@ -251,14 +301,12 @@ case class PrimaryServiceProxy( /* There is yet a worker apparent. Register the requesting actor. The worker will reply to the original * requesting actor. */ worker ! WorkerRegistrationMessage(requestingActor) - case Some(SourceRef(columnScheme, None)) => + case Some(SourceRef(metaInformation, None)) => /* There is NO worker apparent, yet. Spin one off. */ initializeWorker( - columnScheme, - timeSeriesUuid, + metaInformation, stateData.simulationStart, - stateData.primaryConfig, - stateData.mappingSource + stateData.primaryConfig ) match { case Success(workerRef) => /* Forward the registration request. The worker will reply about successful registration or not. 
*/ @@ -289,33 +337,28 @@ case class PrimaryServiceProxy( /** Instantiate a new [[PrimaryServiceWorker]] and send initialization * information * - * @param columnScheme - * Scheme of the data to expect + * @param metaInformation + * Meta information (including column scheme) of the time series + * @param simulationStart + * The time of the simulation start * @param primaryConfig * Configuration for the primary config - * @param mappingSource - * Source for time series mapping, that might deliver additional - * information for the source initialization * @return * The [[ActorRef]] to the worker */ protected def initializeWorker( - columnScheme: ColumnScheme, - timeSeriesUuid: UUID, + metaInformation: IndividualTimeSeriesMetaInformation, simulationStart: ZonedDateTime, - primaryConfig: PrimaryConfig, - mappingSource: TimeSeriesMappingSource + primaryConfig: PrimaryConfig ): Try[ActorRef] = { val workerRef = classToWorkerRef( - columnScheme.getValueClass, - timeSeriesUuid.toString, - simulationStart + metaInformation.getColumnScheme.getValueClass, + metaInformation.getUuid.toString ) toInitData( - primaryConfig, - mappingSource, - timeSeriesUuid, - simulationStart + metaInformation, + simulationStart, + primaryConfig ) match { case Success(initData) => scheduler ! ScheduleTriggerMessage( @@ -341,8 +384,6 @@ case class PrimaryServiceProxy( * Class of the values to provide later on * @param timeSeriesUuid * uuid of the time series the actor processes - * @param simulationStart - * Wall clock time of first instant in simulation * @tparam V * Type of the class to provide * @return @@ -350,33 +391,29 @@ case class PrimaryServiceProxy( */ protected def classToWorkerRef[V <: Value]( valueClass: Class[V], - timeSeriesUuid: String, - simulationStart: ZonedDateTime + timeSeriesUuid: String ): ActorRef = { import edu.ie3.simona.actor.SimonaActorNaming._ context.system.simonaActorOf( - PrimaryServiceWorker.props(scheduler, valueClass, simulationStart), + PrimaryServiceWorker.props(scheduler, valueClass), timeSeriesUuid ) } /** Building proper init data for the worker * - * @param primaryConfig - * Configuration for primary sources - * @param mappingSource - * Source to get mapping information about time series - * @param timeSeriesUuid - * Unique identifier for the time series + * @param metaInformation + * Meta information (including column scheme) of the time series * @param simulationStart - * Wall clock time of the first instant in simulation + * The time of the simulation start + * @param primaryConfig + * Configuration for the primary config * @return */ private def toInitData( - primaryConfig: PrimaryConfig, - mappingSource: TimeSeriesMappingSource, - timeSeriesUuid: UUID, - simulationStart: ZonedDateTime + metaInformation: IndividualTimeSeriesMetaInformation, + simulationStart: ZonedDateTime, + primaryConfig: PrimaryConfig ): Try[InitPrimaryServiceStateData] = primaryConfig match { case PrimaryConfig( @@ -385,29 +422,43 @@ case class PrimaryServiceProxy( None, None ) => - /* The mapping and actual data sources are from csv. At first, get the file name of the file to read. 
*/ - Try(mappingSource.getTimeSeriesMetaInformation(timeSeriesUuid).get) - .flatMap { - /* Time series meta information could be successfully obtained */ - case csvMetaData: CsvIndividualTimeSeriesMetaInformation => - Success( - CsvInitPrimaryServiceStateData( - timeSeriesUuid, - simulationStart, - csvSep, - directoryPath, - csvMetaData.getFullFilePath, - new FileNamingStrategy(), - timePattern - ) + /* The actual data sources are from csv. Meta information have to match */ + metaInformation match { + case csvMetaData: CsvIndividualTimeSeriesMetaInformation => + Success( + CsvInitPrimaryServiceStateData( + csvMetaData.getUuid, + simulationStart, + csvSep, + directoryPath, + csvMetaData.getFullFilePath, + new FileNamingStrategy(), + timePattern ) - case invalidMetaData => - Failure( - new InitializationException( - s"Expected '${classOf[CsvIndividualTimeSeriesMetaInformation]}', but got '$invalidMetaData'." - ) + ) + case invalidMetaData => + Failure( + new InitializationException( + s"Expected '${classOf[CsvIndividualTimeSeriesMetaInformation]}', but got '$invalidMetaData'." ) - } + ) + } + + case PrimaryConfig( + None, + None, + None, + Some(sqlParams: SqlParams) + ) => + Success( + SqlInitPrimaryServiceStateData( + metaInformation.getUuid, + simulationStart, + sqlParams, + new DatabaseNamingStrategy() + ) + ) + case unsupported => Failure( new InitializationException( @@ -489,14 +540,14 @@ object PrimaryServiceProxy { /** Giving reference to the target time series and source worker. * - * @param columnScheme - * Column scheme of the time series to get + * @param metaInformation + * Meta information (including column scheme) of the time series * @param worker - * Optional reference to a yet existing worker providing information on - * that time series + * Optional reference to an already existing worker providing information + * on that time series */ final case class SourceRef( - columnScheme: ColumnScheme, + metaInformation: IndividualTimeSeriesMetaInformation, worker: Option[ActorRef] ) @@ -522,7 +573,7 @@ object PrimaryServiceProxy { } val supportedSources = - Set("csv") + Set("csv", "sql") val sourceConfigs = Seq( primaryConfig.couchbaseParams, @@ -546,6 +597,8 @@ object PrimaryServiceProxy { // note: if inheritance is supported by tscfg, // the following method should be called for all different supported sources! 
checkTimePattern(csvParams.timePattern) + case Some(sqlParams: SimonaConfig.Simona.Input.Primary.SqlParams) => + checkTimePattern(sqlParams.timePattern) case Some(x) => throw new InvalidConfigParameterException( s"Invalid configuration '$x' for a time series source.\nAvailable types:\n\t${supportedSources diff --git a/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceWorker.scala b/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceWorker.scala index 8553ef5533..7d25630cac 100644 --- a/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceWorker.scala +++ b/src/main/scala/edu/ie3/simona/service/primary/PrimaryServiceWorker.scala @@ -7,14 +7,17 @@ package edu.ie3.simona.service.primary import akka.actor.{ActorRef, Props} -import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme +import edu.ie3.datamodel.io.connectors.SqlConnector import edu.ie3.datamodel.io.factory.timeseries.TimeBasedSimpleValueFactory -import edu.ie3.datamodel.io.naming.FileNamingStrategy +import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme +import edu.ie3.datamodel.io.naming.{DatabaseNamingStrategy, FileNamingStrategy} import edu.ie3.datamodel.io.source.TimeSeriesSource import edu.ie3.datamodel.io.source.csv.CsvTimeSeriesSource +import edu.ie3.datamodel.io.source.sql.SqlTimeSeriesSource import edu.ie3.datamodel.models.value.Value import edu.ie3.simona.agent.participant.data.Data.PrimaryData import edu.ie3.simona.agent.participant.data.Data.PrimaryData.RichValue +import edu.ie3.simona.config.SimonaConfig.Simona.Input.Primary.SqlParams import edu.ie3.simona.exceptions.InitializationException import edu.ie3.simona.exceptions.WeatherServiceException.InvalidRegistrationRequestException import edu.ie3.simona.ontology.messages.SchedulerMessage @@ -24,11 +27,11 @@ import edu.ie3.simona.service.ServiceStateData.{ InitializeServiceStateData, ServiceActivationBaseStateData } -import edu.ie3.simona.service.{ServiceStateData, SimonaService} import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ PrimaryServiceInitializedStateData, ProvidePrimaryDataMessage } +import edu.ie3.simona.service.{ServiceStateData, SimonaService} import edu.ie3.simona.util.TickUtil.{RichZonedDateTime, TickLong} import edu.ie3.util.scala.collection.immutable.SortedDistinctSeq @@ -40,8 +43,7 @@ import scala.util.{Failure, Success, Try} final case class PrimaryServiceWorker[V <: Value]( override protected val scheduler: ActorRef, - valueClass: Class[V], - private implicit val startDateTime: ZonedDateTime + valueClass: Class[V] ) extends SimonaService[PrimaryServiceInitializedStateData[V]](scheduler) { /** Initialize the actor with the given information. Try to figure out the @@ -61,68 +63,99 @@ final case class PrimaryServiceWorker[V <: Value]( PrimaryServiceInitializedStateData[V], Option[Seq[SchedulerMessage.ScheduleTriggerMessage]] ) - ] = initServiceData match { - case PrimaryServiceWorker.CsvInitPrimaryServiceStateData( - timeSeriesUuid, - simulationStart, - csvSep, - directoryPath, - filePath, - fileNamingStrategy, - timePattern - ) => - /* Got the right data. 
Attempt to set up a source and acquire information */ - implicit val startDateTime: ZonedDateTime = simulationStart + ] = { + (initServiceData match { + case PrimaryServiceWorker.CsvInitPrimaryServiceStateData( + timeSeriesUuid, + simulationStart, + csvSep, + directoryPath, + filePath, + fileNamingStrategy, + timePattern + ) => + Try { + /* Set up source and acquire information */ + val factory = new TimeBasedSimpleValueFactory(valueClass, timePattern) + val source = new CsvTimeSeriesSource( + csvSep, + directoryPath, + fileNamingStrategy, + timeSeriesUuid, + filePath, + valueClass, + factory + ) + (source, simulationStart) + } - Try { - /* Set up source and acquire information */ - val factory = new TimeBasedSimpleValueFactory(valueClass, timePattern) - val source = new CsvTimeSeriesSource( - csvSep, - directoryPath, - fileNamingStrategy, - timeSeriesUuid, - filePath, - valueClass, - factory - ) - /* This seems not to be very efficient, but it is as efficient as possible. The getter method points to a - * final attribute within the source implementation. */ - val (maybeNextTick, furtherActivationTicks) = SortedDistinctSeq( - source.getTimeSeries.getEntries.asScala - .filter { timeBasedValue => - val dateTime = timeBasedValue.getTime - dateTime.isEqual(simulationStart) || dateTime.isAfter( - simulationStart - ) - } - .map(timeBasedValue => timeBasedValue.getTime.toTick) - .toSeq - .sorted - ).pop + case PrimaryServiceWorker.SqlInitPrimaryServiceStateData( + timeSeriesUuid: UUID, + simulationStart: ZonedDateTime, + sqlParams: SqlParams, + namingStrategy: DatabaseNamingStrategy + ) => + Try { + val factory = + new TimeBasedSimpleValueFactory(valueClass, sqlParams.timePattern) - /* Set up the state data and determine the next activation tick. */ - val initializedStateData = - PrimaryServiceInitializedStateData( - maybeNextTick, - furtherActivationTicks, - simulationStart, - source + val sqlConnector = new SqlConnector( + sqlParams.jdbcUrl, + sqlParams.userName, + sqlParams.password ) - val triggerMessage = - ServiceActivationBaseStateData.tickToScheduleTriggerMessages( - maybeNextTick, - self + + val source = new SqlTimeSeriesSource( + sqlConnector, + sqlParams.schemaName, + namingStrategy, + timeSeriesUuid, + valueClass, + factory + ) + + (source, simulationStart) + } + + case unsupported => + /* Got the wrong init data */ + Failure( + new InitializationException( + s"Provided init data '${unsupported.getClass.getSimpleName}' for primary service are invalid!" ) - (initializedStateData, triggerMessage) - } - case unsupported => - /* Got the wrong init data */ - Failure( - new InitializationException( - s"Provided init data '${unsupported.getClass.getSimpleName}' for primary service are invalid!" ) - ) + }).map { case (source, simulationStart) => + implicit val startDateTime: ZonedDateTime = simulationStart + + val (maybeNextTick, furtherActivationTicks) = SortedDistinctSeq( + // Note: The whole data set is used here, which might be inefficient depending on the source implementation. + source.getTimeSeries.getEntries.asScala + .filter { timeBasedValue => + val dateTime = timeBasedValue.getTime + dateTime.isEqual(simulationStart) || dateTime.isAfter( + simulationStart + ) + } + .map(timeBasedValue => timeBasedValue.getTime.toTick) + .toSeq + .sorted + ).pop + + /* Set up the state data and determine the next activation tick. 
*/ + val initializedStateData = + PrimaryServiceInitializedStateData( + maybeNextTick, + furtherActivationTicks, + simulationStart, + source + ) + val triggerMessage = + ServiceActivationBaseStateData.tickToScheduleTriggerMessages( + maybeNextTick, + self + ) + (initializedStateData, triggerMessage) + } } /** Handle a request to register for information from this service @@ -289,7 +322,7 @@ final case class PrimaryServiceWorker[V <: Value]( } } -case object PrimaryServiceWorker { +object PrimaryServiceWorker { /** List of supported column schemes aka. column schemes, that belong to * primary data @@ -303,10 +336,9 @@ case object PrimaryServiceWorker { def props[V <: Value]( scheduler: ActorRef, - valueClass: Class[V], - simulationStart: ZonedDateTime + valueClass: Class[V] ): Props = - Props(new PrimaryServiceWorker(scheduler, valueClass, simulationStart)) + Props(new PrimaryServiceWorker(scheduler, valueClass)) /** Abstract class pattern for specific [[InitializeServiceStateData]]. * Different implementations are needed, because the [[PrimaryServiceProxy]] @@ -348,6 +380,25 @@ case object PrimaryServiceWorker { timePattern: String ) extends InitPrimaryServiceStateData + /** Specific implementation of [[InitPrimaryServiceStateData]], if the source + * to use utilizes an SQL database. + * + * @param timeSeriesUuid + * Unique identifier of the time series to read + * @param simulationStart + * Wall clock time of the beginning of simulation time + * @param sqlParams + * Parameters regarding SQL connection and table selection + * @param databaseNamingStrategy + * Strategy of naming database entities, such as tables + */ + final case class SqlInitPrimaryServiceStateData( + override val timeSeriesUuid: UUID, + override val simulationStart: ZonedDateTime, + sqlParams: SqlParams, + databaseNamingStrategy: DatabaseNamingStrategy + ) extends InitPrimaryServiceStateData + /** Class carrying the state of a fully initialized [[PrimaryServiceWorker]] * * @param maybeNextActivationTick diff --git a/src/main/scala/edu/ie3/simona/service/weather/WeatherSource.scala b/src/main/scala/edu/ie3/simona/service/weather/WeatherSource.scala index 6da1016db2..488ecd4e5e 100644 --- a/src/main/scala/edu/ie3/simona/service/weather/WeatherSource.scala +++ b/src/main/scala/edu/ie3/simona/service/weather/WeatherSource.scala @@ -164,7 +164,8 @@ trait WeatherSource { } } - /** Determine the weights of each coordinate + /** Determine the weights of each coordinate. 
It is ensured, that the entirety + * of weights sum up to 1.0 * * @param nearestCoordinates * Collection of nearest coordinates with their distances @@ -522,9 +523,9 @@ object WeatherSource { ): WeatherData = { WeatherData( weatherValue.getSolarIrradiance.getDiffuseIrradiance - .orElse(EMPTY_WEATHER_DATA.diffRad), + .orElse(EMPTY_WEATHER_DATA.diffIrr), weatherValue.getSolarIrradiance.getDirectIrradiance - .orElse(EMPTY_WEATHER_DATA.dirRad), + .orElse(EMPTY_WEATHER_DATA.dirIrr), weatherValue.getTemperature.getTemperature .orElse(EMPTY_WEATHER_DATA.temp), weatherValue.getWind.getVelocity.orElse(EMPTY_WEATHER_DATA.windVel) @@ -559,7 +560,7 @@ object WeatherSource { */ object WeatherScheme extends ParsableEnumeration { val ICON: Value = Value("icon") - val PSDM: Value = Value("psdm") + val COSMO: Value = Value("cosmo") } } diff --git a/src/main/scala/edu/ie3/simona/service/weather/WeatherSourceWrapper.scala b/src/main/scala/edu/ie3/simona/service/weather/WeatherSourceWrapper.scala index baf41e6c9f..2d61d4c1c5 100644 --- a/src/main/scala/edu/ie3/simona/service/weather/WeatherSourceWrapper.scala +++ b/src/main/scala/edu/ie3/simona/service/weather/WeatherSourceWrapper.scala @@ -13,8 +13,8 @@ import edu.ie3.datamodel.io.connectors.{ SqlConnector } import edu.ie3.datamodel.io.factory.timeseries.{ - IconTimeBasedWeatherValueFactory, - PsdmTimeBasedWeatherValueFactory + CosmoTimeBasedWeatherValueFactory, + IconTimeBasedWeatherValueFactory } import edu.ie3.datamodel.io.naming.FileNamingStrategy import edu.ie3.datamodel.io.source.couchbase.CouchbaseWeatherSource @@ -45,7 +45,9 @@ import edu.ie3.simona.util.TickUtil import edu.ie3.simona.util.TickUtil.TickLong import edu.ie3.util.exceptions.EmptyQuantityException import edu.ie3.util.interval.ClosedInterval +import edu.ie3.util.scala.DoubleUtils.ImplicitDouble import tech.units.indriya.quantity.Quantities +import tech.units.indriya.unit.Units import java.time.ZonedDateTime import javax.measure.Quantity @@ -140,21 +142,21 @@ private[weather] final case class WeatherSourceWrapper private ( ) /* Determine actual weights and contributions */ - val (diffRadContrib, diffRadWeight) = currentWeather.diffRad match { - case EMPTY_WEATHER_DATA.diffRad => (EMPTY_WEATHER_DATA.diffRad, 0d) - case nonEmptyDiffRad => + val (diffIrrContrib, diffIrrWeight) = currentWeather.diffIrr match { + case EMPTY_WEATHER_DATA.diffIrr => (EMPTY_WEATHER_DATA.diffIrr, 0d) + case nonEmptyDiffIrr => calculateContrib( - nonEmptyDiffRad, + nonEmptyDiffIrr, weight, StandardUnits.SOLAR_IRRADIANCE, s"Diffuse solar irradiance not available at $point." ) } - val (dirRadContrib, dirRadWeight) = currentWeather.dirRad match { - case EMPTY_WEATHER_DATA.dirRad => (EMPTY_WEATHER_DATA.dirRad, 0d) - case nonEmptyDirRad => + val (dirIrrContrib, dirIrrWeight) = currentWeather.dirIrr match { + case EMPTY_WEATHER_DATA.`dirIrr` => (EMPTY_WEATHER_DATA.dirIrr, 0d) + case nonEmptyDirIrr => calculateContrib( - nonEmptyDirRad, + nonEmptyDirIrr, weight, StandardUnits.SOLAR_IRRADIANCE, s"Direct solar irradiance not available at $point." @@ -164,7 +166,7 @@ private[weather] final case class WeatherSourceWrapper private ( case EMPTY_WEATHER_DATA.temp => (EMPTY_WEATHER_DATA.temp, 0d) case nonEmptyTemp => calculateContrib( - nonEmptyTemp, + nonEmptyTemp.to(Units.KELVIN), weight, StandardUnits.TEMPERATURE, s"Temperature not available at $point." 
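The weighting changes in `WeatherSourceWrapper` are easier to follow outside of the diff: per attribute, every surrounding coordinate contributes value times weight, coordinates without data for that attribute contribute neither value nor weight, and the accumulated sum is finally divided by the accumulated weight (see the `WeightSum.scale` method further below). The following self-contained sketch mirrors that idea with plain doubles and made-up numbers; it is illustrative only and not project code.

```scala
// Illustration of the per-attribute weighting scheme with assumed numbers,
// using plain doubles instead of ComparableQuantity values.
object WeightingSketch {
  final case class Sample(dirIrr: Option[Double], weight: Double)

  def weightedDirIrr(samples: Seq[Sample]): Option[Double] = {
    // Only coordinates that actually provide a value contribute to the sums.
    val (contribSum, weightSum) = samples.foldLeft((0d, 0d)) {
      case ((cSum, wSum), Sample(Some(value), weight)) =>
        (cSum + value * weight, wSum + weight)
      case (acc, Sample(None, _)) => acc // missing data: weight is not counted
    }
    // Dividing by the partial weight sum re-normalises the average; if no
    // coordinate provided data at all, there is nothing meaningful to return.
    Option.when(weightSum > 0d)(contribSum / weightSum)
  }

  def main(args: Array[String]): Unit = {
    val samples = Seq(
      Sample(Some(120.0), 0.5), // nearest coordinate
      Sample(None, 0.3),        // no direct irradiance available here
      Sample(Some(90.0), 0.2)
    )
    // prints Some(111.42857142857143) = (120*0.5 + 90*0.2) / (0.5 + 0.2)
    println(weightedDirIrr(samples))
  }
}
```

Dividing by the partial weight sum is what the removed manual division did for all four attributes at once; `scale` now additionally guards against an all-zero weight sum.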
@@ -184,29 +186,22 @@ private[weather] final case class WeatherSourceWrapper private ( /* Sum up weight and contributions */ ( WeatherData( - averagedWeather.diffRad.add(diffRadContrib), - averagedWeather.dirRad.add(dirRadContrib), + averagedWeather.diffIrr.add(diffIrrContrib), + averagedWeather.dirIrr.add(dirIrrContrib), averagedWeather.temp.add(tempContrib), averagedWeather.windVel.add(windVelContrib) ), currentWeightSum.add( - diffRadWeight, - dirRadWeight, + diffIrrWeight, + dirIrrWeight, tempWeight, windVelWeight ) ) } match { case (weatherData: WeatherData, weightSum: WeightSum) => - /* Divide by weight sum to correctly account for missing data. Change temperature scale back to absolute*/ - WeatherData( - weatherData.diffRad.divide(weightSum.diffRad), - weatherData.dirRad.divide(weightSum.dirRad), - weatherData.temp.divide(weightSum.temp), - weatherData.windVel.divide(weightSum.windVel) - ) + weightSum.scale(weatherData) } - } /** Determine an Array with all ticks between the request frame's start and @@ -227,7 +222,7 @@ private[weather] final case class WeatherSourceWrapper private ( } private[weather] object WeatherSourceWrapper extends LazyLogging { - private val DEFAULT_RESOLUTION = 360L + private val DEFAULT_RESOLUTION = 3600L def apply( csvSep: String, @@ -243,7 +238,7 @@ private[weather] object WeatherSourceWrapper extends LazyLogging { folderPath, new FileNamingStrategy(), idCoordinateSource, - buildFactory(timestampPattern, scheme) + buildFactory(scheme, timestampPattern) ) logger.info( "Successfully initiated CsvWeatherSource as source for WeatherSourceWrapper." @@ -274,7 +269,7 @@ private[weather] object WeatherSourceWrapper extends LazyLogging { idCoordinateSourceFunction(), couchbaseParams.coordinateColumnName, couchbaseParams.keyPrefix, - buildFactory(timestampPattern, scheme) + buildFactory(scheme, timestampPattern) ) logger.info( "Successfully initiated CouchbaseWeatherSource as source for WeatherSourceWrapper." @@ -299,7 +294,7 @@ private[weather] object WeatherSourceWrapper extends LazyLogging { val source = new InfluxDbWeatherSource( influxDb1xConnector, idCoordinateSource, - buildFactory(timestampPattern, scheme) + buildFactory(scheme, timestampPattern) ) logger.info( "Successfully initiated InfluxDbWeatherSource as source for WeatherSourceWrapper." @@ -328,8 +323,8 @@ private[weather] object WeatherSourceWrapper extends LazyLogging { sqlConnector, idCoordinateSource, sqlParams.schemaName, - sqlParams.weatherTableName, - buildFactory(timestampPattern, scheme) + sqlParams.tableName, + buildFactory(scheme, timestampPattern) ) logger.info( "Successfully initiated SqlWeatherSource as source for WeatherSourceWrapper." @@ -341,66 +336,86 @@ private[weather] object WeatherSourceWrapper extends LazyLogging { ) } - private def buildFactory(timestampPattern: Option[String], scheme: String) = { - timestampPattern match { - case None => initWeatherFactory(scheme) - case Some(timeStampPattern) => - initWeatherFactory(scheme, timeStampPattern) - } - } - - private def initWeatherFactory(scheme: String) = - Try(WeatherScheme(scheme)) match { - case Failure(_) => - throw new InitializationException( - s"Error while initializing WeatherFactory for weather source wrapper: '$scheme' is not a weather scheme. 
Supported schemes:\n\t${WeatherScheme.values - .mkString("\n\t")}'" - ) - case Success(WeatherScheme.ICON) => new IconTimeBasedWeatherValueFactory() - case Success(WeatherScheme.PSDM) => new PsdmTimeBasedWeatherValueFactory() - case Success(unknownScheme) => - throw new InitializationException( - s"Error while initializing WeatherFactory for weather source wrapper: weather scheme '$unknownScheme' is not an expected input." - ) - } - - private def initWeatherFactory(scheme: String, timeStampPattern: String) = + private def buildFactory(scheme: String, timestampPattern: Option[String]) = Try(WeatherScheme(scheme)) match { - case Failure(_) => + case Failure(exception) => throw new InitializationException( s"Error while initializing WeatherFactory for weather source wrapper: '$scheme' is not a weather scheme. Supported schemes:\n\t${WeatherScheme.values - .mkString("\n\t")}'" + .mkString("\n\t")}'", + exception ) case Success(WeatherScheme.ICON) => - new IconTimeBasedWeatherValueFactory(timeStampPattern) - case Success(WeatherScheme.PSDM) => - new PsdmTimeBasedWeatherValueFactory(timeStampPattern) + timestampPattern + .map(new IconTimeBasedWeatherValueFactory(_)) + .getOrElse(new IconTimeBasedWeatherValueFactory()) + case Success(WeatherScheme.COSMO) => + timestampPattern + .map(new CosmoTimeBasedWeatherValueFactory(_)) + .getOrElse(new CosmoTimeBasedWeatherValueFactory()) case Success(unknownScheme) => throw new InitializationException( s"Error while initializing WeatherFactory for weather source wrapper: weather scheme '$unknownScheme' is not an expected input." ) } + /** Simple container class to allow for accumulating determination of the sum + * of weights for different weather properties for different locations + * surrounding a given coordinate of interest + * + * @param diffIrr + * Sum of weight for diffuse irradiance + * @param dirIrr + * Sum of weight for direct irradiance + * @param temp + * Sum of weight for temperature + * @param windVel + * Sum of weight for wind velocity + */ final case class WeightSum( - diffRad: Double, - dirRad: Double, + diffIrr: Double, + dirIrr: Double, temp: Double, windVel: Double ) { def add( - diffRad: Double, - dirRad: Double, + diffIrr: Double, + dirIrr: Double, temp: Double, windVel: Double ): WeightSum = WeightSum( - this.diffRad + diffRad, - this.dirRad + dirRad, + this.diffIrr + diffIrr, + this.dirIrr + dirIrr, this.temp + temp, this.windVel + windVel ) + + /** Scale the given [[WeatherData]] by dividing by the sum of weights per + * attribute of the weather data. If one of the weight sums is empty (and + * thus a division by zero would happen) the defined "empty" information + * for this attribute is returned. 
+ * + * @param weatherData + * Weighted and accumulated weather information + * @return + * Weighted weather information, which are divided by the sum of weights + */ + def scale(weatherData: WeatherData): WeatherData = weatherData match { + case WeatherData(diffIrr, dirIrr, temp, windVel) => + implicit val precision: Double = 1e-3 + WeatherData( + if (this.diffIrr !~= 0d) diffIrr.divide(this.diffIrr) + else EMPTY_WEATHER_DATA.diffIrr, + if (this.dirIrr !~= 0d) dirIrr.divide(this.dirIrr) + else EMPTY_WEATHER_DATA.dirIrr, + if (this.temp !~= 0d) temp.divide(this.temp) + else EMPTY_WEATHER_DATA.temp, + if (this.windVel !~= 0d) windVel.divide(this.windVel) + else EMPTY_WEATHER_DATA.windVel + ) + } } - case object WeightSum { + object WeightSum { val EMPTY_WEIGHT_SUM: WeightSum = WeightSum(0d, 0d, 0d, 0d) } diff --git a/src/main/scala/edu/ie3/simona/sim/SimonaSim.scala b/src/main/scala/edu/ie3/simona/sim/SimonaSim.scala index e71227c906..4358fa2581 100644 --- a/src/main/scala/edu/ie3/simona/sim/SimonaSim.scala +++ b/src/main/scala/edu/ie3/simona/sim/SimonaSim.scala @@ -36,8 +36,8 @@ import edu.ie3.simona.sim.SimonaSim.{ import edu.ie3.simona.sim.setup.{ExtSimSetupData, SimonaSetup} import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.{Await, Future} import scala.concurrent.duration.{DurationInt, FiniteDuration} +import scala.concurrent.{Await, Future} import scala.language.postfixOps /** Main entrance point to a simona simulation as top level actor. This actors diff --git a/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala b/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala index b4703c2873..d4ee521750 100644 --- a/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala +++ b/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala @@ -321,11 +321,7 @@ object ConfigUtil { logger.info( "Password for SQL weather source is empty. This is allowed, but not common. Please check if this an intended setting." ) - if (sql.timeColumnName.isEmpty) - throw new InvalidConfigParameterException( - "Time column for SQL weather source cannot be empty" - ) - if (sql.weatherTableName.isEmpty) + if (sql.tableName.isEmpty) throw new InvalidConfigParameterException( "Weather table name for SQL weather source cannot be empty" ) diff --git a/src/main/scala/edu/ie3/util/scala/DoubleUtils.scala b/src/main/scala/edu/ie3/util/scala/DoubleUtils.scala new file mode 100644 index 0000000000..67bc7cf7cf --- /dev/null +++ b/src/main/scala/edu/ie3/util/scala/DoubleUtils.scala @@ -0,0 +1,17 @@ +/* + * © 2022. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.util.scala + +@deprecated("Use implementation in power system utils package") +object DoubleUtils { + implicit class ImplicitDouble(d: Double) { + def ~=(other: Double)(implicit precision: Double): Boolean = + (d - other).abs <= precision + def !~=(other: Double)(implicit precision: Double): Boolean = + (d - other).abs > precision + } +} diff --git a/src/test/groovy/edu/ie3/simona/model/participant/PVModelIT.groovy b/src/test/groovy/edu/ie3/simona/model/participant/PVModelIT.groovy index 90002b66e6..2dd49af64d 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/PVModelIT.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/PVModelIT.groovy @@ -88,7 +88,7 @@ class PVModelIT extends Specification implements PVModelITHelper { "build the needed data" WeatherMessage.WeatherData weather = modelToWeatherMap.get(modelId) - PVModel.PVRelevantData neededData = new PVModel.PVRelevantData(dateTime,3600L, weather.diffRad() as ComparableQuantity, weather.dirRad() as ComparableQuantity) + PVModel.PVRelevantData neededData = new PVModel.PVRelevantData(dateTime,3600L, weather.diffIrr() as ComparableQuantity, weather.dirIrr() as ComparableQuantity) ComparableQuantity voltage = getQuantity(1.414213562, PU) "collect the results and calculate the difference between the provided results and the calculated ones" @@ -153,7 +153,7 @@ trait PVModelITHelper { inputModel.getAlbedo(), inputModel.getEtaConv(), inputModel.getAzimuth(), - inputModel.getHeight(), + inputModel.getElevationAngle(), getQuantity(1d, SQUARE_METRE) ) diff --git a/src/test/groovy/edu/ie3/simona/model/participant/PVModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/PVModelTest.groovy index 6cb8535c2c..6aac83d271 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/PVModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/PVModelTest.groovy @@ -108,7 +108,7 @@ class PVModelTest extends Specification { pvInput.getAlbedo(), pvInput.getEtaConv() as ComparableQuantity, getQuantity(Math.toRadians(pvInput.getAzimuth().getValue().doubleValue()), RADIAN), - getQuantity(Math.toRadians(pvInput.getHeight().getValue().doubleValue()), RADIAN), + getQuantity(Math.toRadians(pvInput.getElevationAngle().getValue().doubleValue()), RADIAN), getQuantity(1d, SQUARE_METRE) ) } diff --git a/src/test/resources/edu/ie3/simona/model/participant/pv/it/grid_data/pv_input.csv b/src/test/resources/edu/ie3/simona/model/participant/pv/it/grid_data/pv_input.csv index a594598bfc..b4ff778eaf 100644 --- a/src/test/resources/edu/ie3/simona/model/participant/pv/it/grid_data/pv_input.csv +++ b/src/test/resources/edu/ie3/simona/model/participant/pv/it/grid_data/pv_input.csv @@ -1,4 +1,4 @@ -"uuid";"albedo";"azimuth";"cosphi_rated";"eta_conv";"height";"id";"k_g";"k_t";"market_reaction";"operates_from";"operates_until";"s_rated";"q_characteristics";"node";"operator" +"uuid";"albedo";"azimuth";"cosphi_rated";"eta_conv";"elevation_angle";"id";"k_g";"k_t";"market_reaction";"operates_from";"operates_until";"s_rated";"q_characteristics";"node";"operator" 7ac5bb15-36ee-42b0-902b-9cd520e241b3;0.2;16.09490984119475;0.95;91.23978812713176;51.75144341774285;pv_south_1;0.9;1;false;;;100;cosPhiFixed:{(0.00,1.0)};022a94c6-2d60-4400-875c-ab9db1ae2736; 
939d254a-98b9-43d9-939d-dac9d91e7d73;0.2;-11.883286549709737;0.95;93.55452200165019;50.710754711180925;pv_south_2;0.9;1;false;;;100;cosPhiFixed:{(0.00,1.0)};9a2524f1-3639-4e90-a547-81a259712f8c; e3b34366-9a4b-4e8f-b46d-fccdd3c318b3;0.2;-3.6445723846554756;0.95;90.07983175106347;50.727743320167065;pv_south_3;0.9;1;false;;;100;cosPhiFixed:{(0.00,1.0)};9354b02c-a4a9-4e9d-905a-e48110b04d88; diff --git a/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_mapping.sql b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_mapping.sql new file mode 100644 index 0000000000..b3921f442c --- /dev/null +++ b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_mapping.sql @@ -0,0 +1,15 @@ +CREATE TABLE public.time_series_mapping +( + uuid uuid PRIMARY KEY, + participant uuid, + time_series uuid +) + WITHOUT OIDS + TABLESPACE pg_default; + +INSERT INTO + public.time_series_mapping (uuid, participant, time_series) +VALUES +('58167015-d760-4f90-8109-f2ebd94cda91', 'b86e95b0-e579-4a80-a534-37c7a470a409', '9185b8c1-86ba-4a16-8dea-5ac898e8caa5'), +('9a9ebfda-dc26-4a40-b9ca-25cd42f6cc3f', 'c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8', '3fbfaa97-cff4-46d4-95ba-a95665e87c26'), +('9c1c53ea-e575-41a2-a373-a8b2d3ed2c39', '90a96daa-012b-4fea-82dc-24ba7a7ab81c', '3fbfaa97-cff4-46d4-95ba-a95665e87c26'); diff --git a/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_p.sql b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_p.sql new file mode 100644 index 0000000000..b17b091eac --- /dev/null +++ b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_p.sql @@ -0,0 +1,24 @@ +CREATE TABLE public.time_series_p +( + uuid uuid PRIMARY KEY, + time_series uuid NOT NULL, + time timestamp with time zone NOT NULL, + p double precision NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_p_series_id ON time_series_p USING hash (time_series); + +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to be placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time.
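-- Illustrative aside (editor's note, not part of the original fixture): with this column order,
-- a lookup of the shape
--   SELECT time, p FROM public.time_series_p
--   WHERE time_series = '9185b8c1-86ba-4a16-8dea-5ac898e8caa5'
--     AND time BETWEEN '2020-01-01 00:00:00+0' AND '2020-01-01 00:15:00+0';
-- is served by the leading equality column plus the trailing range column of the unique
-- btree index defined below.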
+CREATE UNIQUE INDEX time_series_p_series_time ON time_series_p USING btree (time_series, time); + +INSERT INTO + public.time_series_p (uuid, time_series, time, p) +VALUES +('0245d599-9a5c-4c32-9613-5b755fac8ca0', '9185b8c1-86ba-4a16-8dea-5ac898e8caa5', '2020-01-01 00:00:00+0', 1000.0), +('a5e27652-9024-4a93-9d2a-590fbc3ab5a1', '9185b8c1-86ba-4a16-8dea-5ac898e8caa5', '2020-01-01 00:15:00+0', 1250.0), +('b4a2b3e0-7215-431b-976e-d8b41c7bc71b', 'b669e4bf-a351-4067-860d-d5f224b62247', '2020-01-01 00:00:00+0', 50.0), +('1c8f072c-c833-47da-a3e9-5f4d305ab926', 'b669e4bf-a351-4067-860d-d5f224b62247', '2020-01-01 00:15:00+0', 100.0); diff --git a/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_pqh.sql b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_pqh.sql new file mode 100644 index 0000000000..fa23010ce0 --- /dev/null +++ b/src/test/resources/edu/ie3/simona/service/primary/timeseries/time_series_pqh.sql @@ -0,0 +1,24 @@ +CREATE TABLE public.time_series_pqh +( + uuid uuid PRIMARY KEY, + time_series uuid NOT NULL, + time timestamp with time zone NOT NULL, + p double precision NOT NULL, + q double precision NOT NULL, + heat_demand double precision NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_pqh_series_id ON time_series_pqh USING hash (time_series); + +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to be placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time. +CREATE UNIQUE INDEX time_series_pqh_series_time ON time_series_pqh USING btree (time_series, time); + +INSERT INTO + public.time_series_pqh (uuid, time_series, time, p, q, heat_demand) +VALUES +('661ac594-47f0-4442-8d82-bbeede5661f7', '46be1e57-e4ed-4ef7-95f1-b2b321cb2047', '2020-01-01 00:00:00+0', 1000.0, 329.0, 8.0), +('5adcd6c5-a903-433f-b7b5-5fe669a3ed30', '46be1e57-e4ed-4ef7-95f1-b2b321cb2047', '2020-01-01 00:15:00+0', 1250.0, 411.0, 12.0); diff --git a/src/test/scala/edu/ie3/simona/agent/participant/PVAgentModelCalculationSpec.scala b/src/test/scala/edu/ie3/simona/agent/participant/PVAgentModelCalculationSpec.scala index cc54e60372..39edbf6921 100644 --- a/src/test/scala/edu/ie3/simona/agent/participant/PVAgentModelCalculationSpec.scala +++ b/src/test/scala/edu/ie3/simona/agent/participant/PVAgentModelCalculationSpec.scala @@ -585,8 +585,8 @@ class PVAgentModelCalculationSpec 0L -> PVRelevantData( 0L.toDateTime, 3600L, - weatherData.diffRad, - weatherData.dirRad + weatherData.diffIrr, + weatherData.dirIrr ) ) } @@ -737,8 +737,8 @@ class PVAgentModelCalculationSpec 0L -> PVRelevantData( 0L.toDateTime, 3600L, - weatherData.diffRad, - weatherData.dirRad + weatherData.diffIrr, + weatherData.dirIrr ) ) } diff --git a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala index decc6d59cd..57a4144cc0 --- a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala +++ b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala @@ -875,7 +875,7 @@ class ConfigFailFastSpec extends UnitSpec with ConfigTestData { ConfigFailFast invokePrivate checkWeatherDataSource( weatherDataSource ) - }.getMessage shouldBe "The weather data scheme 'this won't work' is not supported. Supported schemes:\n\ticon\n\tpsdm" + }.getMessage shouldBe "The weather data scheme 'this won't work' is not supported.
Supported schemes:\n\ticon\n\tcosmo" } } diff --git a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala index 60daa65d82..8a95b4fa64 100644 --- a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala +++ b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala @@ -10,7 +10,7 @@ import java.io.{File, FileInputStream} import java.util.zip.GZIPInputStream import akka.actor.ActorSystem import akka.stream.Materializer -import akka.testkit.{ImplicitSender, TestFSMRef, TestKit, TestProbe} +import akka.testkit.{TestFSMRef, TestProbe} import com.typesafe.config.ConfigFactory import edu.ie3.datamodel.models.result.connector.{ LineResult, @@ -25,11 +25,7 @@ import edu.ie3.simona.event.ResultEvent.{ ParticipantResultEvent, PowerFlowResultEvent } -import edu.ie3.simona.io.result.{ - ResultEntityCsvSink, - ResultEntitySink, - ResultSinkType -} +import edu.ie3.simona.io.result.{ResultEntitySink, ResultSinkType} import edu.ie3.simona.test.common.result.PowerFlowResultData import edu.ie3.simona.test.common.{AgentSpec, IOTestCommons, UnitSpec} import edu.ie3.simona.util.ResultFileHierarchy @@ -141,11 +137,27 @@ class ResultEventListenerSpec assert(outputFile.exists) assert(outputFile.isFile) } + + "check if actor dies when it should die" in { + val fileHierarchy = resultFileHierarchy(2, ".ttt") + val testProbe = TestProbe() + val listener = testProbe.childActorOf( + ResultEventListener.props( + Set(classOf[Transformer3WResult]), + fileHierarchy, + testProbe.ref + ) + ) + + testProbe watch listener + testProbe expectTerminated (listener, 2 seconds) + + } } "handling ordinary results" should { "process a valid participants result correctly" in { - val specificOutputFileHierarchy = resultFileHierarchy(2, ".csv") + val specificOutputFileHierarchy = resultFileHierarchy(3, ".csv") val listenerRef = system.actorOf( ResultEventListener @@ -192,7 +204,7 @@ class ResultEventListenerSpec } "process a valid power flow result correctly" in { - val specificOutputFileHierarchy = resultFileHierarchy(3, ".csv") + val specificOutputFileHierarchy = resultFileHierarchy(4, ".csv") val listenerRef = system.actorOf( ResultEventListener .props( @@ -280,7 +292,7 @@ class ResultEventListenerSpec PrivateMethod[Map[Transformer3wKey, AggregatedTransformer3wResult]]( Symbol("registerPartialTransformer3wResult") ) - val fileHierarchy = resultFileHierarchy(4, ".csv") + val fileHierarchy = resultFileHierarchy(5, ".csv") val listener = TestFSMRef( new ResultEventListener( Set(classOf[Transformer3WResult]), @@ -510,7 +522,7 @@ class ResultEventListenerSpec "shutting down" should { "shutdown and compress the data when requested to do so without any errors" in { - val specificOutputFileHierarchy = resultFileHierarchy(5, ".csv.gz") + val specificOutputFileHierarchy = resultFileHierarchy(6, ".csv.gz") val listenerRef = system.actorOf( ResultEventListener .props( diff --git a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySpec.scala b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySpec.scala index dae2702292..c8a708af7a 100644 --- a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySpec.scala +++ b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySpec.scala @@ -10,8 +10,9 @@ import akka.actor.{ActorRef, ActorSystem, PoisonPill} import akka.testkit.{TestActorRef, TestProbe} import akka.util.Timeout import com.typesafe.config.ConfigFactory 
-import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme +import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation import edu.ie3.datamodel.io.naming.FileNamingStrategy +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation import edu.ie3.datamodel.io.source.TimeSeriesMappingSource import edu.ie3.datamodel.io.source.csv.CsvTimeSeriesMappingSource import edu.ie3.datamodel.models.value.{SValue, Value} @@ -33,22 +34,23 @@ import edu.ie3.simona.ontology.messages.SchedulerMessage.{ ScheduleTriggerMessage, TriggerWithIdMessage } +import edu.ie3.simona.ontology.messages.services.ServiceMessage.RegistrationResponseMessage.RegistrationFailedMessage import edu.ie3.simona.ontology.messages.services.ServiceMessage.{ PrimaryServiceRegistrationMessage, WorkerRegistrationMessage } -import edu.ie3.simona.ontology.messages.services.ServiceMessage.RegistrationResponseMessage.RegistrationFailedMessage import edu.ie3.simona.ontology.trigger.Trigger.InitializeServiceTrigger -import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ - CsvInitPrimaryServiceStateData, - InitPrimaryServiceStateData -} import edu.ie3.simona.service.primary.PrimaryServiceProxy.{ InitPrimaryServiceProxyStateData, PrimaryServiceStateData, SourceRef } +import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ + CsvInitPrimaryServiceStateData, + InitPrimaryServiceStateData +} import edu.ie3.simona.test.common.AgentSpec +import edu.ie3.simona.test.common.input.TimeSeriesTestData import edu.ie3.util.TimeUtil import org.scalatest.PartialFunctionValues import org.scalatest.prop.TableDrivenPropertyChecks @@ -57,8 +59,8 @@ import java.nio.file.Paths import java.time.ZonedDateTime import java.util.concurrent.TimeUnit import java.util.{Objects, UUID} -import scala.util.{Failure, Success, Try} import scala.concurrent.ExecutionContext.Implicits.global +import scala.util.{Failure, Success, Try} class PrimaryServiceProxySpec extends AgentSpec( @@ -72,7 +74,8 @@ class PrimaryServiceProxySpec ) ) with TableDrivenPropertyChecks - with PartialFunctionValues { + with PartialFunctionValues + with TimeSeriesTestData { // this works both on Windows and Unix systems val baseDirectoryPath: String = Paths .get( @@ -103,30 +106,19 @@ class PrimaryServiceProxySpec baseDirectoryPath, fileNamingStrategy ) - val workerId: String = - "PrimaryService_3fbfaa97-cff4-46d4-95ba-a95665e87c26" + val workerId: String = "PrimaryService_" + uuidPq val modelUuid: UUID = UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8") - val timeSeriesUuid: UUID = - UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") val simulationStart: ZonedDateTime = TimeUtil.withDefaults.toZonedDateTime("2021-03-17 13:14:00") val proxyStateData: PrimaryServiceStateData = PrimaryServiceStateData( Map( - UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409") -> UUID - .fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), - modelUuid -> UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), - UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c") -> UUID - .fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409") -> uuidP, + modelUuid -> uuidPq, + UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c") -> uuidPq ), Map( - UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") -> SourceRef( - ColumnScheme.ACTIVE_POWER, - None - ), - UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") -> SourceRef( - ColumnScheme.APPARENT_POWER, - None - ) + uuidP -> SourceRef(metaP, None), + uuidPq -> 
SourceRef(metaPq, None) ), simulationStart, validPrimaryConfig, @@ -147,7 +139,7 @@ class PrimaryServiceProxySpec val exception = intercept[InvalidConfigParameterException]( PrimaryServiceProxy.checkConfig(maliciousConfig) ) - exception.getMessage shouldBe "2 time series source types defined. Please define only one type!\nAvailable types:\n\tcsv" + exception.getMessage shouldBe "2 time series source types defined. Please define only one type!\nAvailable types:\n\tcsv\n\tsql" } "lead to complaining about too few source definitions" in { @@ -161,7 +153,7 @@ class PrimaryServiceProxySpec val exception = intercept[InvalidConfigParameterException]( PrimaryServiceProxy.checkConfig(maliciousConfig) ) - exception.getMessage shouldBe "No time series source type defined. Please define exactly one type!\nAvailable types:\n\tcsv" + exception.getMessage shouldBe "No time series source type defined. Please define exactly one type!\nAvailable types:\n\tcsv\n\tsql" } "not let couchbase parameters pass for mapping configuration" in { @@ -175,7 +167,7 @@ class PrimaryServiceProxySpec val exception = intercept[InvalidConfigParameterException]( PrimaryServiceProxy.checkConfig(maliciousConfig) ) - exception.getMessage shouldBe "Invalid configuration 'CouchbaseParams(,,,,,,)' for a time series source.\nAvailable types:\n\tcsv" + exception.getMessage shouldBe "Invalid configuration 'CouchbaseParams(,,,,,,)' for a time series source.\nAvailable types:\n\tcsv\n\tsql" } "let csv parameters pass for mapping configuration" in { @@ -202,24 +194,10 @@ class PrimaryServiceProxySpec val exception = intercept[InvalidConfigParameterException]( PrimaryServiceProxy.checkConfig(maliciousConfig) ) - exception.getMessage shouldBe "Invalid configuration 'InfluxDb1xParams(,0,,)' for a time series source.\nAvailable types:\n\tcsv" + exception.getMessage shouldBe "Invalid configuration 'InfluxDb1xParams(,0,,)' for a time series source.\nAvailable types:\n\tcsv\n\tsql" } - "not let sql parameters pass for mapping configuration" in { - val maliciousConfig = PrimaryConfig( - None, - None, - None, - Some(SqlParams("", "", "", "", "", "", "")) - ) - - val exception = intercept[InvalidConfigParameterException]( - PrimaryServiceProxy.checkConfig(maliciousConfig) - ) - exception.getMessage shouldBe "Invalid configuration 'SqlParams(,,,,,,)' for a time series source.\nAvailable types:\n\tcsv" - } - - "fails on invalid time pattern" in { + "fails on invalid time pattern with csv" in { val invalidTimePatternConfig = PrimaryConfig( None, Some(CsvParams("", "", "xYz")), @@ -234,7 +212,7 @@ class PrimaryServiceProxySpec } - "succeeds on valid time pattern" in { + "succeeds on valid time pattern with csv" in { val validTimePatternConfig = PrimaryConfig( None, Some(CsvParams("", "", "yyyy-MM-dd'T'HH:mm'Z[UTC]'")), @@ -281,8 +259,8 @@ class PrimaryServiceProxySpec val maliciousConfig = PrimaryConfig( None, None, - None, - Some(SqlParams("", "", "", "", "", "", "")) + Some(InfluxDb1xParams("", -1, "", "")), + None ) proxy invokePrivate prepareStateData( @@ -293,7 +271,7 @@ class PrimaryServiceProxySpec fail("Building state data with missing config should fail") case Failure(exception) => exception.getClass shouldBe classOf[IllegalArgumentException] - exception.getMessage shouldBe "Unsupported config for mapping source: 'SqlParams(,,,,,,)'" + exception.getMessage shouldBe "Unsupported config for mapping source: 'InfluxDb1xParams(,-1,,)'" } } @@ -312,24 +290,13 @@ class PrimaryServiceProxySpec ) ) => modelToTimeSeries shouldBe Map( - 
UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409") -> UUID - .fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), - UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8") -> UUID - .fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), - UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c") -> UUID - .fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409") -> uuidP, + UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8") -> uuidPq, + UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c") -> uuidPq ) timeSeriesToSourceRef shouldBe Map( - UUID - .fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") -> SourceRef( - ColumnScheme.ACTIVE_POWER, - None - ), - UUID - .fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") -> SourceRef( - ColumnScheme.APPARENT_POWER, - None - ) + uuidP -> SourceRef(metaP, None), + uuidPq -> SourceRef(metaPq, None) ) simulationStart shouldBe this.simulationStart primaryConfig shouldBe validPrimaryConfig @@ -372,8 +339,7 @@ class PrimaryServiceProxySpec val workerRef = proxy invokePrivate classToWorkerRef( testClass, - workerId, - simulationStart + workerId ) Objects.nonNull(workerRef) shouldBe true @@ -385,12 +351,15 @@ class PrimaryServiceProxySpec val toInitData = PrivateMethod[Try[InitPrimaryServiceStateData]]( Symbol("toInitData") ) + val metaInformation = new CsvIndividualTimeSeriesMetaInformation( + metaPq, + "its_pq_" + uuidPq + ) proxy invokePrivate toInitData( - validPrimaryConfig, - mappingSource, - timeSeriesUuid, - simulationStart + metaInformation, + simulationStart, + validPrimaryConfig ) match { case Success( CsvInitPrimaryServiceStateData( @@ -403,11 +372,11 @@ class PrimaryServiceProxySpec timePattern ) ) => - actualTimeSeriesUuid shouldBe timeSeriesUuid + actualTimeSeriesUuid shouldBe uuidPq actualSimulationStart shouldBe simulationStart actualCsvSep shouldBe csvSep directoryPath shouldBe baseDirectoryPath - filePath shouldBe "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" + filePath shouldBe metaInformation.getFullFilePath classOf[FileNamingStrategy].isAssignableFrom( fileNamingStrategy.getClass ) shouldBe true @@ -429,13 +398,10 @@ class PrimaryServiceProxySpec None, None ) - proxy invokePrivate initializeWorker( - ColumnScheme.APPARENT_POWER, - timeSeriesUuid, + metaPq, simulationStart, - maliciousPrimaryConfig, - mappingSource + maliciousPrimaryConfig ) match { case Failure(exception) => /* Check the exception */ @@ -468,35 +434,32 @@ class PrimaryServiceProxySpec TestActorRef(new PrimaryServiceProxy(scheduler.ref, simulationStart) { override protected def classToWorkerRef[V <: Value]( valueClass: Class[V], - timeSeriesUuid: String, - simulationStart: ZonedDateTime + timeSeriesUuid: String ): ActorRef = testProbe.ref // needs to be overwritten as to make it available to the private method tester @SuppressWarnings(Array("NoOpOverride")) override protected def initializeWorker( - columnScheme: ColumnScheme, - timeSeriesUuid: UUID, + metaInformation: IndividualTimeSeriesMetaInformation, simulationStart: ZonedDateTime, - primaryConfig: PrimaryConfig, - mappingSource: TimeSeriesMappingSource + primaryConfig: PrimaryConfig ): Try[ActorRef] = super.initializeWorker( - columnScheme, - timeSeriesUuid, + metaInformation, simulationStart, - primaryConfig, - mappingSource + primaryConfig ) }) val fakeProxy: PrimaryServiceProxy = fakeProxyRef.underlyingActor + val metaInformation = new CsvIndividualTimeSeriesMetaInformation( + metaPq, + "its_pq_" + uuidPq + ) fakeProxy invokePrivate initializeWorker( - 
ColumnScheme.APPARENT_POWER, - timeSeriesUuid, + metaInformation, simulationStart, - validPrimaryConfig, - mappingSource + validPrimaryConfig ) match { case Success(workerRef) => /* Check, if expected init message has been sent */ @@ -515,11 +478,11 @@ class PrimaryServiceProxySpec ), actorToBeScheduled ) => - actualTimeSeriesUuid shouldBe timeSeriesUuid + actualTimeSeriesUuid shouldBe uuidPq actualSimulationStart shouldBe simulationStart actualCsvSep shouldBe csvSep directoryPath shouldBe baseDirectoryPath - filePath shouldBe "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" + filePath shouldBe metaInformation.getFullFilePath classOf[FileNamingStrategy].isAssignableFrom( fileNamingStrategy.getClass ) shouldBe true @@ -556,7 +519,7 @@ class PrimaryServiceProxySpec "work otherwise" in { proxy invokePrivate updateStateData( proxyStateData, - timeSeriesUuid, + uuidPq, self ) match { case PrimaryServiceStateData( @@ -568,16 +531,8 @@ class PrimaryServiceProxySpec ) => modelToTimeSeries shouldBe proxyStateData.modelToTimeSeries timeSeriesToSourceRef shouldBe Map( - UUID - .fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") -> SourceRef( - ColumnScheme.ACTIVE_POWER, - None - ), - UUID - .fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") -> SourceRef( - ColumnScheme.APPARENT_POWER, - Some(self) - ) + uuidP -> SourceRef(metaP, None), + uuidPq -> SourceRef(metaPq, Some(self)) ) simulationStart shouldBe proxyStateData.simulationStart primaryConfig shouldBe proxyStateData.primaryConfig @@ -595,7 +550,7 @@ class PrimaryServiceProxySpec proxy invokePrivate handleCoveredModel( modelUuid, - timeSeriesUuid, + uuidPq, maliciousStateData, self ) @@ -605,13 +560,13 @@ class PrimaryServiceProxySpec "forward the registration request, if worker is already known" in { val adaptedStateData = proxyStateData.copy( timeSeriesToSourceRef = Map( - timeSeriesUuid -> SourceRef(ColumnScheme.APPARENT_POWER, Some(self)) + uuidPq -> SourceRef(metaPq, Some(self)) ) ) proxy invokePrivate handleCoveredModel( modelUuid, - timeSeriesUuid, + uuidPq, adaptedStateData, self ) @@ -630,7 +585,7 @@ class PrimaryServiceProxySpec proxy invokePrivate handleCoveredModel( modelUuid, - timeSeriesUuid, + uuidPq, maliciousStateData, self ) @@ -643,11 +598,9 @@ class PrimaryServiceProxySpec val fakeProxyRef = TestActorRef(new PrimaryServiceProxy(self, simulationStart) { override protected def initializeWorker( - columnScheme: ColumnScheme, - timeSeriesUuid: UUID, + metaInformation: IndividualTimeSeriesMetaInformation, simulationStart: ZonedDateTime, - primaryConfig: PrimaryConfig, - mappingSource: TimeSeriesMappingSource + primaryConfig: PrimaryConfig ): Try[ActorRef] = Success(probe.ref) // needs to be overwritten as to make it available to the private method tester @@ -669,7 +622,7 @@ class PrimaryServiceProxySpec fakeProxy invokePrivate handleCoveredModel( modelUuid, - timeSeriesUuid, + uuidPq, proxyStateData, self ) @@ -693,11 +646,9 @@ class PrimaryServiceProxySpec val fakeProxyRef = TestActorRef(new PrimaryServiceProxy(self, simulationStart) { override protected def initializeWorker( - columnScheme: ColumnScheme, - timeSeriesUuid: UUID, + metaInformation: IndividualTimeSeriesMetaInformation, simulationStart: ZonedDateTime, - primaryConfig: PrimaryConfig, - mappingSource: TimeSeriesMappingSource + primaryConfig: PrimaryConfig ): Try[ActorRef] = Success(probe.ref) }) diff --git a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySqlIT.scala b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySqlIT.scala new file 
mode 100644 index 0000000000..9a3f97e46f --- /dev/null +++ b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceProxySqlIT.scala @@ -0,0 +1,199 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.service.primary + +import akka.actor.ActorSystem +import akka.testkit.{TestActorRef, TestProbe} +import com.dimafeng.testcontainers.{ForAllTestContainer, PostgreSQLContainer} +import com.typesafe.config.ConfigFactory +import edu.ie3.simona.config.SimonaConfig +import edu.ie3.simona.config.SimonaConfig.Simona.Input.Primary.SqlParams +import edu.ie3.simona.ontology.messages.SchedulerMessage.{ + CompletionMessage, + ScheduleTriggerMessage, + TriggerWithIdMessage +} +import edu.ie3.simona.ontology.messages.services.ServiceMessage.PrimaryServiceRegistrationMessage +import edu.ie3.simona.ontology.messages.services.ServiceMessage.RegistrationResponseMessage.{ + RegistrationFailedMessage, + RegistrationSuccessfulMessage +} +import edu.ie3.simona.ontology.trigger.Trigger.{ + ActivityStartTrigger, + InitializeServiceTrigger +} +import edu.ie3.simona.service.primary.PrimaryServiceProxy.InitPrimaryServiceProxyStateData +import edu.ie3.simona.service.primary.PrimaryServiceWorker.SqlInitPrimaryServiceStateData +import edu.ie3.simona.test.common.AgentSpec +import edu.ie3.simona.test.helper.TestContainerHelper +import edu.ie3.util.TimeUtil +import org.scalatest.BeforeAndAfterAll + +import java.util.UUID + +class PrimaryServiceProxySqlIT + extends AgentSpec( + ActorSystem( + "PrimaryServiceWorkerSqlIT", + ConfigFactory + .parseString(""" + |akka.loglevel="OFF" + """.stripMargin) + ) + ) + with ForAllTestContainer + with BeforeAndAfterAll + with TestContainerHelper { + + override val container: PostgreSQLContainer = PostgreSQLContainer( + "postgres:14.2" + ) + + private val simulationStart = + TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00") + + private val schemaName = "public" + + override protected def beforeAll(): Unit = { + // Copy sql import scripts into docker + val sqlImportFile = getMountableFile("timeseries/") + container.copyFileToContainer(sqlImportFile, "/home/") + + Iterable( + "time_series_p.sql", + "time_series_pqh.sql", + "time_series_mapping.sql" + ).foreach { file => + val res = container.execInContainer("psql", "-Utest", "-f/home/" + file) + res.getStderr shouldBe empty + } + } + + override protected def afterAll(): Unit = { + container.stop() + container.close() + } + + // function definition because postgres parameters are only available after initialization + private def sqlParams: SqlParams = SqlParams( + jdbcUrl = container.jdbcUrl, + userName = container.username, + password = container.password, + schemaName = schemaName, + timePattern = "yyyy-MM-dd HH:mm:ss" + ) + + "A primary service proxy with SQL source" should { + val scheduler = TestProbe("Scheduler") + + val proxyRef = TestActorRef( + PrimaryServiceProxy.props( + scheduler.ref, + simulationStart + ) + ) + + "initialize when given proper SQL input configs" in { + val initData = InitPrimaryServiceProxyStateData( + SimonaConfig.Simona.Input.Primary( + None, + None, + None, + sqlParams = Some(sqlParams) + ), + simulationStart + ) + + val triggerIdInit1 = 1L + + scheduler.send( + proxyRef, + TriggerWithIdMessage( + InitializeServiceTrigger(initData), + triggerIdInit1, + proxyRef + ) + ) + + scheduler.expectMsg( + CompletionMessage( + triggerIdInit1, + None + ) + ) 
+ } + + "handle participant request correctly if participant has primary data" in { + val systemParticipantProbe = TestProbe("SystemParticipant") + + systemParticipantProbe.send( + proxyRef, + PrimaryServiceRegistrationMessage( + UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409") + ) + ) + + val initTriggerMsg = scheduler.expectMsgType[ScheduleTriggerMessage] + + initTriggerMsg.trigger match { + case InitializeServiceTrigger( + sqlInit: SqlInitPrimaryServiceStateData + ) => + sqlInit.sqlParams shouldBe sqlParams + sqlInit.simulationStart shouldBe simulationStart + sqlInit.timeSeriesUuid shouldBe UUID.fromString( + "9185b8c1-86ba-4a16-8dea-5ac898e8caa5" + ) + case unexpected => fail(s"Received unexpected trigger $unexpected") + } + + val triggerIdInit2 = 2L + + // extract ref to the worker that the proxy created + val workerRef = initTriggerMsg.actorToBeScheduled + scheduler.send( + workerRef, + TriggerWithIdMessage( + initTriggerMsg.trigger, + triggerIdInit2, + workerRef + ) + ) + + scheduler.expectMsg( + CompletionMessage( + triggerIdInit2, + Some( + Seq( + ScheduleTriggerMessage( + ActivityStartTrigger(0L), + workerRef + ) + ) + ) + ) + ) + + systemParticipantProbe.expectMsg(RegistrationSuccessfulMessage(Some(0L))) + } + + "handle participant request correctly if participant does not have primary data" in { + val systemParticipantProbe = TestProbe("SystemParticipant") + + systemParticipantProbe.send( + proxyRef, + PrimaryServiceRegistrationMessage( + UUID.fromString("db958617-e49d-44d3-b546-5f7b62776afd") + ) + ) + + scheduler.expectNoMessage() + + systemParticipantProbe.expectMsg(RegistrationFailedMessage) + } + } +} diff --git a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSpec.scala b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSpec.scala index 53556678fa..2a7e0d816c 100644 --- a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSpec.scala +++ b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSpec.scala @@ -37,6 +37,7 @@ import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ } import edu.ie3.simona.service.primary.PrimaryServiceWorkerSpec.WrongInitPrimaryServiceStateData import edu.ie3.simona.test.common.AgentSpec +import edu.ie3.simona.test.common.input.TimeSeriesTestData import edu.ie3.util.TimeUtil import edu.ie3.util.quantities.PowerSystemUnits import edu.ie3.util.scala.collection.immutable.SortedDistinctSeq @@ -56,7 +57,8 @@ class PrimaryServiceWorkerSpec |akka.loglevel="OFF" """.stripMargin) ) - ) { + ) + with TimeSeriesTestData { // this works both on Windows and Unix systems val baseDirectoryPath: String = Paths .get( @@ -68,15 +70,12 @@ class PrimaryServiceWorkerSpec ) .toString - private val simulationStart = - TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00") - val validInitData: CsvInitPrimaryServiceStateData = CsvInitPrimaryServiceStateData( - timeSeriesUuid = UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), + timeSeriesUuid = uuidP, csvSep = ";", directoryPath = baseDirectoryPath, - filePath = "its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5", + filePath = "its_p_" + uuidP, fileNamingStrategy = new FileNamingStrategy(), simulationStart = TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00"), @@ -88,8 +87,7 @@ class PrimaryServiceWorkerSpec TestActorRef( new PrimaryServiceWorker[PValue]( self, - classOf[PValue], - simulationStart + classOf[PValue] ) ) val service = serviceRef.underlyingActor @@ -106,13 +104,12 @@ class PrimaryServiceWorkerSpec "fail, if pointed to 
the wrong file" in { val maliciousInitData = CsvInitPrimaryServiceStateData( - timeSeriesUuid = - UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), + timeSeriesUuid = uuidPq, simulationStart = TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00"), csvSep = ";", directoryPath = baseDirectoryPath, - filePath = "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26", + filePath = "its_pq_" + uuidPq, fileNamingStrategy = new FileNamingStrategy(), timePattern = TimeUtil.withDefaults.getDtfPattern ) @@ -200,8 +197,8 @@ class PrimaryServiceWorkerSpec ";", baseDirectoryPath, new FileNamingStrategy(), - UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), - "its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5", + uuidP, + "its_p_" + uuidP, classOf[PValue], new TimeBasedSimpleValueFactory[PValue](classOf[PValue]) ), diff --git a/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSqlIT.scala b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSqlIT.scala new file mode 100644 index 0000000000..a11ec6ae73 --- /dev/null +++ b/src/test/scala/edu/ie3/simona/service/primary/PrimaryServiceWorkerSqlIT.scala @@ -0,0 +1,204 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.service.primary + +import akka.actor.ActorSystem +import akka.testkit.{TestActorRef, TestProbe} +import com.dimafeng.testcontainers.{ForAllTestContainer, PostgreSQLContainer} +import com.typesafe.config.ConfigFactory +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy +import edu.ie3.datamodel.models.StandardUnits +import edu.ie3.datamodel.models.value.{HeatAndSValue, PValue} +import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{ + ActivePower, + ApparentPowerAndHeat +} +import edu.ie3.simona.config.SimonaConfig.Simona.Input.Primary.SqlParams +import edu.ie3.simona.ontology.messages.SchedulerMessage.{ + CompletionMessage, + ScheduleTriggerMessage, + TriggerWithIdMessage +} +import edu.ie3.simona.ontology.messages.services.ServiceMessage.RegistrationResponseMessage.RegistrationSuccessfulMessage +import edu.ie3.simona.ontology.messages.services.ServiceMessage.WorkerRegistrationMessage +import edu.ie3.simona.ontology.trigger.Trigger.{ + ActivityStartTrigger, + InitializeServiceTrigger +} +import edu.ie3.simona.service.primary.PrimaryServiceWorker.{ + ProvidePrimaryDataMessage, + SqlInitPrimaryServiceStateData +} +import edu.ie3.simona.test.common.AgentSpec +import edu.ie3.simona.test.common.input.TimeSeriesTestData +import edu.ie3.simona.test.helper.TestContainerHelper +import edu.ie3.util.TimeUtil +import org.scalatest.BeforeAndAfterAll +import org.scalatest.prop.TableDrivenPropertyChecks +import tech.units.indriya.quantity.Quantities + +class PrimaryServiceWorkerSqlIT + extends AgentSpec( + ActorSystem( + "PrimaryServiceWorkerSqlIT", + ConfigFactory + .parseString(""" + |akka.loglevel="OFF" + """.stripMargin) + ) + ) + with ForAllTestContainer + with BeforeAndAfterAll + with TableDrivenPropertyChecks + with TimeSeriesTestData + with TestContainerHelper { + + override val container: PostgreSQLContainer = PostgreSQLContainer( + "postgres:14.2" + ) + + private val simulationStart = + TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00") + + private val schemaName = "public" + + override protected def beforeAll(): Unit = { + // Copy sql import scripts into docker + val sqlImportFile = getMountableFile("timeseries/") + 
container.copyFileToContainer(sqlImportFile, "/home/") + + Iterable("time_series_p.sql", "time_series_pqh.sql") + .foreach { file => + val res = container.execInContainer("psql", "-Utest", "-f/home/" + file) + res.getStderr shouldBe empty + } + } + + override protected def afterAll(): Unit = { + container.stop() + container.close() + } + + "A primary service actor with SQL source" should { + "initialize and send out data when activated" in { + val scheduler = TestProbe("Scheduler") + + val cases = Table( + ( + "service", + "uuid", + "firstTick", + "firstData", + "maybeNextTick" + ), + ( + PrimaryServiceWorker.props( + scheduler.ref, + classOf[HeatAndSValue] + ), + uuidPqh, + 0L, + ApparentPowerAndHeat( + Quantities.getQuantity(1000.0d, StandardUnits.ACTIVE_POWER_IN), + Quantities.getQuantity(329.0d, StandardUnits.REACTIVE_POWER_IN), + Quantities.getQuantity(8000.0, StandardUnits.HEAT_DEMAND_PROFILE) + ), + Some(900L) + ), + ( + PrimaryServiceWorker.props( + scheduler.ref, + classOf[PValue] + ), + uuidP, + 0L, + ActivePower( + Quantities.getQuantity(1000.0d, StandardUnits.ACTIVE_POWER_IN) + ), + Some(900L) + ) + ) + + forAll(cases) { + ( + service, + uuid, + firstTick, + firstData, + maybeNextTick + ) => + val serviceRef = TestActorRef(service) + + val initData = SqlInitPrimaryServiceStateData( + uuid, + simulationStart, + SqlParams( + jdbcUrl = container.jdbcUrl, + userName = container.username, + password = container.password, + schemaName = schemaName, + timePattern = "yyyy-MM-dd HH:mm:ss" + ), + new DatabaseNamingStrategy() + ) + + val triggerId1 = 1L + + scheduler.send( + serviceRef, + TriggerWithIdMessage( + InitializeServiceTrigger(initData), + triggerId1, + serviceRef + ) + ) + + scheduler.expectMsg( + CompletionMessage( + triggerId1, + Some( + Seq( + ScheduleTriggerMessage( + ActivityStartTrigger(firstTick), + serviceRef + ) + ) + ) + ) + ) + + val participant = TestProbe() + + participant.send( + serviceRef, + WorkerRegistrationMessage(participant.ref) + ) + participant.expectMsg(RegistrationSuccessfulMessage(Some(firstTick))) + + val triggerId2 = 2L + + scheduler.send( + serviceRef, + TriggerWithIdMessage( + ActivityStartTrigger(firstTick), + triggerId2, + serviceRef + ) + ) + + scheduler.expectMsgType[CompletionMessage] + + val dataMsg = participant.expectMsgType[ProvidePrimaryDataMessage] + dataMsg.tick shouldBe firstTick + dataMsg.data shouldBe firstData + dataMsg.nextDataTick shouldBe maybeNextTick + + scheduler.expectNoMessage() + } + } + } +} diff --git a/src/test/scala/edu/ie3/simona/service/weather/SampleWeatherSourceSpec.scala b/src/test/scala/edu/ie3/simona/service/weather/SampleWeatherSourceSpec.scala index 0b84a7bda7..ef9987a3fd 100644 --- a/src/test/scala/edu/ie3/simona/service/weather/SampleWeatherSourceSpec.scala +++ b/src/test/scala/edu/ie3/simona/service/weather/SampleWeatherSourceSpec.scala @@ -83,16 +83,16 @@ class SampleWeatherSourceSpec val actual = source invokePrivate getWeatherPrivate(tick) /* Units meet expectation */ - actual.diffRad.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE - actual.dirRad.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE + actual.diffIrr.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE + actual.dirIrr.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE actual.temp.getUnit shouldBe StandardUnits.TEMPERATURE actual.windVel.getUnit shouldBe StandardUnits.WIND_VELOCITY /* Values meet expectations */ - actual.diffRad should equalWithTolerance( + actual.diffIrr should equalWithTolerance( Quantities.getQuantity(72.7656, 
StandardUnits.SOLAR_IRRADIANCE) ) - actual.dirRad should equalWithTolerance( + actual.dirIrr should equalWithTolerance( Quantities.getQuantity(80.1172, StandardUnits.SOLAR_IRRADIANCE) ) actual.windVel should equalWithTolerance( @@ -108,14 +108,14 @@ class SampleWeatherSourceSpec WeightedCoordinates(Map(NodeInput.DEFAULT_GEO_POSITION -> 1d)) source.getWeather(tick, weightedCoordinates) match { - case WeatherData(diffRad, dirRad, temp, windVel) => - diffRad.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE - diffRad should equalWithTolerance( + case WeatherData(diffIrr, dirIrr, temp, windVel) => + diffIrr.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE + diffIrr should equalWithTolerance( Quantities.getQuantity(72.7656, StandardUnits.SOLAR_IRRADIANCE) ) - dirRad.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE - dirRad should equalWithTolerance( + dirIrr.getUnit shouldBe StandardUnits.SOLAR_IRRADIANCE + dirIrr should equalWithTolerance( Quantities.getQuantity(80.1172, StandardUnits.SOLAR_IRRADIANCE) ) diff --git a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala index e619deb026..8b0502c6cf 100644 --- a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala +++ b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala @@ -16,14 +16,20 @@ import edu.ie3.datamodel.models.timeseries.individual.{ TimeBasedValue } import edu.ie3.datamodel.models.value.WeatherValue -import edu.ie3.simona.service.weather.WeatherSource.WeightedCoordinates +import edu.ie3.simona.ontology.messages.services.WeatherMessage.WeatherData +import edu.ie3.simona.service.weather.WeatherSource.{ + EMPTY_WEATHER_DATA, + WeightedCoordinates +} import edu.ie3.simona.service.weather.WeatherSourceSpec.DummyIdCoordinateSource +import edu.ie3.simona.service.weather.WeatherSourceWrapper.WeightSum import edu.ie3.simona.service.weather.WeatherSourceWrapperSpec._ import edu.ie3.simona.test.common.UnitSpec import edu.ie3.util.geo.GeoUtils import edu.ie3.util.interval.ClosedInterval import org.locationtech.jts.geom.Point import tech.units.indriya.quantity.Quantities +import tech.units.indriya.unit.Units import java.time.{ZoneId, ZonedDateTime} import java.util @@ -60,10 +66,10 @@ class WeatherSourceWrapperSpec extends UnitSpec { ) val result = source.getWeather(date.toEpochSecond, weightedCoordinates) val sumOfAll = 1 + 1 + 1 + 13 - result.dirRad should equalWithTolerance( + result.dirIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 4, StandardUnits.SOLAR_IRRADIANCE) ) - result.diffRad should equalWithTolerance( + result.diffIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 4, StandardUnits.SOLAR_IRRADIANCE) ) result.temp should equalWithTolerance( @@ -85,10 +91,10 @@ class WeatherSourceWrapperSpec extends UnitSpec { ) val result = source.getWeather(date.toEpochSecond, weightedCoordinates) val sumOfAll = 1 + 1 + 1 + 13 - result.dirRad should equalWithTolerance( + result.dirIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 4, StandardUnits.SOLAR_IRRADIANCE) ) - result.diffRad should equalWithTolerance( + result.diffIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 4, StandardUnits.SOLAR_IRRADIANCE) ) result.temp should equalWithTolerance( @@ -110,10 +116,10 @@ class WeatherSourceWrapperSpec extends UnitSpec { ) val result = source.getWeather(date.toEpochSecond, weightedCoordinates) val sumOfAll = 1 + 1 + 1 - result.dirRad should 
equalWithTolerance( + result.dirIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 3, StandardUnits.SOLAR_IRRADIANCE) ) - result.diffRad should equalWithTolerance( + result.diffIrr should equalWithTolerance( Quantities.getQuantity(sumOfAll / 3, StandardUnits.SOLAR_IRRADIANCE) ) result.temp should equalWithTolerance( @@ -127,10 +133,10 @@ class WeatherSourceWrapperSpec extends UnitSpec { "calculate the correct weighted value for 1 coordinate with a weight of 1" in { val weightedCoordinates = WeightedCoordinates(Map(coordinate13 -> 1d)) val result = source.getWeather(date.toEpochSecond, weightedCoordinates) - result.dirRad should equalWithTolerance( + result.dirIrr should equalWithTolerance( Quantities.getQuantity(13, StandardUnits.SOLAR_IRRADIANCE) ) - result.diffRad should equalWithTolerance( + result.diffIrr should equalWithTolerance( Quantities.getQuantity(13, StandardUnits.SOLAR_IRRADIANCE) ) result.temp should equalWithTolerance( @@ -147,9 +153,142 @@ class WeatherSourceWrapperSpec extends UnitSpec { result.temp.getScale shouldBe Scale.ABSOLUTE } } + + "Handling the weighted weather" when { + "adding to the weight sum" should { + "produce correct results" in { + val weightSum = WeightSum(0.1d, 0.2d, 0.3d, 0.4d) + val weightSumAdded = weightSum.add(0.2d, 0.3d, 0.4d, 0.5d) + + weightSumAdded.diffIrr should ===(0.3d +- 1e-10) + weightSumAdded.dirIrr should ===(0.5d +- 1e-10) + weightSumAdded.temp should ===(0.7d +- 1e-10) + weightSumAdded.windVel should ===(0.9d +- 1e-10) + } + } + + "scaling the weighted attributes with the sum of weights" should { + "calculate proper information on proper input" in { + val weatherSeq = Seq( + (0.5, 0.75, 291d, 10d), + (12.3, 1.2, 293d, 12d), + (25.0, 5.7, 290d, 9d), + (26.3, 1.7, 289d, 11d) + ) + val weights = Seq( + (0.1, 0.2, 0.3, 0.4), + (0.25, 0.2, 0.25, 0.1), + (0.3, 0.4, 0.15, 0.05), + (0.35, 0.2, 0.3, 0.45) + ) + + val (weightedWeather, weightSum) = + prepareWeightTestData(weatherSeq, weights) + + weightSum.scale(weightedWeather) match { + case WeatherData(diffIrr, dirIrr, temp, windVel) => + diffIrr should equalWithTolerance( + Quantities.getQuantity(19.83, StandardUnits.SOLAR_IRRADIANCE), + 1e-6 + ) + dirIrr should equalWithTolerance( + Quantities.getQuantity(3.01, StandardUnits.SOLAR_IRRADIANCE), + 1e-6 + ) + temp should equalWithTolerance( + Quantities + .getQuantity(290.75, Units.KELVIN) + .to(StandardUnits.TEMPERATURE), + 1e-6 + ) + windVel should equalWithTolerance( + Quantities.getQuantity(10.6, StandardUnits.WIND_VELOCITY), + 1e-6 + ) + } + } + } + + "calculate proper input, if data is missing in one coordinate" in { + val weatherSeq = Seq( + (0.5, 0.75, 291d, 10d), + (12.3, 1.2, 293d, 12d), + (25.0, 5.7, 290d, 9d), + (26.3, 1.7, 289d, 11d) + ) + val weights = Seq( + (0.1, 0.2, 0d, 0.4), + (0.25, 0.2, 0d, 0.1), + (0.3, 0.4, 0d, 0.05), + (0.35, 0.2, 0d, 0.45) + ) + + val (weightedWeather, weightSum) = + prepareWeightTestData(weatherSeq, weights) + + weightSum.scale(weightedWeather) match { + case WeatherData(_, _, temp, _) => + temp shouldBe EMPTY_WEATHER_DATA.temp + } + } + + "return empty value for an attribute, if weight sum is zero" in { + val weatherSeq = Seq( + (0.5, 0.75, 291d, 10d), + (12.3, 1.2, 0d, 12d), + (25.0, 5.7, 290d, 9d), + (26.3, 1.7, 289d, 11d) + ) + val weights = Seq( + (0.1, 0.2, 0.3, 0.4), + (0.25, 0.2, 0d, 0.1), + (0.3, 0.4, 0.15, 0.05), + (0.35, 0.2, 0.3, 0.45) + ) + + val (weightedWeather, weightSum) = + prepareWeightTestData(weatherSeq, weights) + + weightSum.scale(weightedWeather) match { + case 
WeatherData(_, _, temp, _) => + temp should equalWithTolerance( + Quantities + .getQuantity(290d, Units.KELVIN) + .to(StandardUnits.TEMPERATURE) + ) + } + } + + "correctly calculate scaled properties if provided with varying weight components" in { + val weatherData = WeatherData( + Quantities.getQuantity(1.0, StandardUnits.SOLAR_IRRADIANCE), + Quantities.getQuantity(1.0, StandardUnits.SOLAR_IRRADIANCE), + Quantities.getQuantity(1.0, Units.KELVIN), + Quantities.getQuantity(1.0, StandardUnits.WIND_VELOCITY) + ) + val weightSum = WeightSum(0.25, 0.5, 0.8, 1.0) + + weightSum.scale(weatherData) match { + case WeatherData(diffIrr, dirIrr, temp, windVel) => + diffIrr should equalWithTolerance( + Quantities.getQuantity(4.0, StandardUnits.SOLAR_IRRADIANCE) + ) + dirIrr should equalWithTolerance( + Quantities.getQuantity(2.0, StandardUnits.SOLAR_IRRADIANCE) + ) + temp should equalWithTolerance( + Quantities + .getQuantity(1.25, Units.KELVIN) + ) + windVel should equalWithTolerance( + Quantities.getQuantity(1.0, StandardUnits.WIND_VELOCITY) + ) + } + } + } } -case object WeatherSourceWrapperSpec { +object WeatherSourceWrapperSpec { // lat/lon are irrelevant, we will manually create weights later on private val coordinate1a = GeoUtils.xyToPoint(6, 51) private val coordinate1b = GeoUtils.xyToPoint(7, 51) @@ -271,4 +410,56 @@ case object WeatherSourceWrapperSpec { } } + /** Prepare test data for WeightSum-related tests + * + * @param weatherSeq + * sequence of raw weather data + * @param weights + * the weights to use for averaging the weather data, with rows equivalent + * to the rows in weatherSeq + * @return + * A tuple of 1. the weighted average weather data and 2. the weight sum + */ + private def prepareWeightTestData( + weatherSeq: Seq[(Double, Double, Double, Double)], + weights: Seq[(Double, Double, Double, Double)] + ): (WeatherData, WeightSum) = { + val weatherData = weatherSeq.map { case (diff, dir, temp, wVel) => + WeatherData( + Quantities.getQuantity(diff, StandardUnits.SOLAR_IRRADIANCE), + Quantities.getQuantity(dir, StandardUnits.SOLAR_IRRADIANCE), + Quantities.getQuantity(temp, Units.KELVIN), + Quantities.getQuantity(wVel, StandardUnits.WIND_VELOCITY) + ) + } + + val weightedWeather = + weatherData.zip(weights).foldLeft(EMPTY_WEATHER_DATA) { + case ( + currentSum, + ( + WeatherData(diffIrr, dirIrr, temp, windVel), + (diffWeight, dirWeight, tempWeight, wVelWeight) + ) + ) => + currentSum.copy( + diffIrr = currentSum.diffIrr.add(diffIrr.multiply(diffWeight)), + dirIrr = currentSum.dirIrr.add(dirIrr.multiply(dirWeight)), + temp = currentSum.temp.add(temp.multiply(tempWeight)), + windVel = currentSum.windVel.add(windVel.multiply(wVelWeight)) + ) + } + val weightSum = weights.foldLeft(WeightSum.EMPTY_WEIGHT_SUM) { + case (currentSum, currentWeight) => + currentSum.add( + currentWeight._1, + currentWeight._2, + currentWeight._3, + currentWeight._4 + ) + } + + (weightedWeather, weightSum) + } + } diff --git a/src/test/scala/edu/ie3/simona/test/common/ThreeWindingTestData.scala b/src/test/scala/edu/ie3/simona/test/common/ThreeWindingTestData.scala index c92a83f43c..e5142e4401 100644 --- a/src/test/scala/edu/ie3/simona/test/common/ThreeWindingTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/ThreeWindingTestData.scala @@ -93,7 +93,7 @@ trait ThreeWindingTestData extends DefaultTestData { Quantities.getQuantity(0.08, OHM), Quantities.getQuantity(0.003, OHM), Quantities.getQuantity(40d, MetricPrefix.NANO(SIEMENS)), - Quantities.getQuantity(1d, MetricPrefix.NANO(SIEMENS)), + 
Quantities.getQuantity(-1d, MetricPrefix.NANO(SIEMENS)), Quantities.getQuantity(1.5, PERCENT), Quantities.getQuantity(0d, DEGREE_GEOM), 0, diff --git a/src/test/scala/edu/ie3/simona/test/common/input/PvInputTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/PvInputTestData.scala index 9d1fcbed43..415941a886 100644 --- a/src/test/scala/edu/ie3/simona/test/common/input/PvInputTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/input/PvInputTestData.scala @@ -52,7 +52,7 @@ trait PvInputTestData 1, Quantities.getQuantity(12, StandardUnits.AZIMUTH), Quantities.getQuantity(10, StandardUnits.EFFICIENCY), - Quantities.getQuantity(100, StandardUnits.SOLAR_HEIGHT), + Quantities.getQuantity(100, StandardUnits.SOLAR_ELEVATION_ANGLE), 12, 11, false, diff --git a/src/test/scala/edu/ie3/simona/test/common/input/TimeSeriesTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/TimeSeriesTestData.scala new file mode 100644 index 0000000000..64d674e3ad --- /dev/null +++ b/src/test/scala/edu/ie3/simona/test/common/input/TimeSeriesTestData.scala @@ -0,0 +1,39 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.test.common.input + +import edu.ie3.datamodel.io.naming.timeseries.{ + ColumnScheme, + IndividualTimeSeriesMetaInformation +} + +import java.util.UUID + +trait TimeSeriesTestData { + protected val uuidP: UUID = + UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") + protected val uuidPq: UUID = + UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + protected val uuidPqh: UUID = + UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047") + + protected val metaP: IndividualTimeSeriesMetaInformation = + new IndividualTimeSeriesMetaInformation( + uuidP, + ColumnScheme.ACTIVE_POWER + ) + protected val metaPq: IndividualTimeSeriesMetaInformation = + new IndividualTimeSeriesMetaInformation( + uuidPq, + ColumnScheme.APPARENT_POWER + ) + protected val metaPqh: IndividualTimeSeriesMetaInformation = + new IndividualTimeSeriesMetaInformation( + uuidPqh, + ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND + ) +} diff --git a/src/test/scala/edu/ie3/simona/test/common/input/Transformer3wTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/Transformer3wTestData.scala index d686e49f81..cbc439fb99 100644 --- a/src/test/scala/edu/ie3/simona/test/common/input/Transformer3wTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/input/Transformer3wTestData.scala @@ -127,7 +127,7 @@ trait Transformer3wTestData extends DefaultTestData { Quantities.getQuantity(0.954711d, OHM), Quantities.getQuantity(1.083000d, OHM), Quantities.getQuantity(40d, MetricPrefix.NANO(SIEMENS)), - Quantities.getQuantity(1d, MetricPrefix.NANO(SIEMENS)), + Quantities.getQuantity(-1d, MetricPrefix.NANO(SIEMENS)), Quantities.getQuantity(1.5, PERCENT), Quantities.getQuantity(0d, DEGREE_GEOM), 0, diff --git a/src/test/scala/edu/ie3/simona/test/common/input/TransformerInputTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/TransformerInputTestData.scala index 006711c268..06ccc6a8e8 100644 --- a/src/test/scala/edu/ie3/simona/test/common/input/TransformerInputTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/input/TransformerInputTestData.scala @@ -91,7 +91,7 @@ trait TransformerInputTestData extends DefaultTestData { Quantities.getQuantity(110d, KILOVOLT), Quantities.getQuantity(10d, KILOVOLT), Quantities.getQuantity(0d, 
MetricPrefix.NANO(SIEMENS)), - Quantities.getQuantity(1.1, MetricPrefix.NANO(SIEMENS)), + Quantities.getQuantity(-1.1, MetricPrefix.NANO(SIEMENS)), Quantities.getQuantity(1.5, PERCENT), Quantities.getQuantity(0d, DEGREE_GEOM), false, diff --git a/src/test/scala/edu/ie3/simona/test/common/model/grid/DbfsTestGrid.scala b/src/test/scala/edu/ie3/simona/test/common/model/grid/DbfsTestGrid.scala index e8523d1600..15d1bca092 100644 --- a/src/test/scala/edu/ie3/simona/test/common/model/grid/DbfsTestGrid.scala +++ b/src/test/scala/edu/ie3/simona/test/common/model/grid/DbfsTestGrid.scala @@ -235,7 +235,7 @@ trait DbfsTestGrid extends SubGridGateMokka { Quantities.getQuantity(380.0, KILOVOLT), Quantities.getQuantity(110.0, KILOVOLT), Quantities.getQuantity(555.5, MetricPrefix.NANO(SIEMENS)), - Quantities.getQuantity(1.27, MetricPrefix.NANO(SIEMENS)), + Quantities.getQuantity(-1.27, MetricPrefix.NANO(SIEMENS)), Quantities.getQuantity(1.5, PERCENT), Quantities.getQuantity(0, RADIAN), false, diff --git a/src/test/scala/edu/ie3/simona/test/common/model/grid/TransformerTestGrid.scala b/src/test/scala/edu/ie3/simona/test/common/model/grid/TransformerTestGrid.scala index a2a3b4d6b1..0e63d23174 100644 --- a/src/test/scala/edu/ie3/simona/test/common/model/grid/TransformerTestGrid.scala +++ b/src/test/scala/edu/ie3/simona/test/common/model/grid/TransformerTestGrid.scala @@ -84,7 +84,7 @@ trait TransformerTestGrid { Quantities.getQuantity(10d, KILOVOLT), Quantities.getQuantity(0.4d, KILOVOLT), Quantities.getQuantity(0d, SIEMENS), - Quantities.getQuantity(15e-6, SIEMENS), + Quantities.getQuantity(-15e-6, SIEMENS), Quantities.getQuantity(2.5d, PERCENT), Quantities.getQuantity(0d, DEGREE_GEOM), false, @@ -102,7 +102,7 @@ trait TransformerTestGrid { Quantities.getQuantity(10d, KILOVOLT), Quantities.getQuantity(0.4d, KILOVOLT), Quantities.getQuantity(0d, SIEMENS), - Quantities.getQuantity(15e-6, SIEMENS), + Quantities.getQuantity(-15e-6, SIEMENS), Quantities.getQuantity(2.5d, PERCENT), Quantities.getQuantity(0d, DEGREE_GEOM), true, diff --git a/src/test/scala/edu/ie3/simona/test/helper/TestContainerHelper.scala b/src/test/scala/edu/ie3/simona/test/helper/TestContainerHelper.scala new file mode 100644 index 0000000000..f58786aa6d --- /dev/null +++ b/src/test/scala/edu/ie3/simona/test/helper/TestContainerHelper.scala @@ -0,0 +1,35 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.test.helper + +import akka.testkit.TestException +import org.testcontainers.utility.MountableFile + +import java.nio.file.Paths + +trait TestContainerHelper { + + /** Retrieve resource with the class' resource loader. In contrast to + * [[org.testcontainers.utility.MountableFile#forClasspathResource(java.lang.String, java.lang.Integer)]], + * this also works with paths relative to the current class (i.e. without + * leading '/'). + * @param resource + * the resource directory or file path + * @return + * a MountableFile to use with test containers + */ + def getMountableFile(resource: String): MountableFile = { + Option(getClass.getResource(resource)) + .map(url => Paths.get(url.toURI)) + .map(MountableFile.forHostPath) + .getOrElse( + throw TestException( + "Resource '" + resource + "' was not found from " + getClass.toString + ) + ) + } +}
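For orientation, a minimal usage sketch of the TestContainerHelper trait above, mirroring what PrimaryServiceWorkerSqlIT does; the spec name MyServiceSqlIT and the single fixture file are placeholders and not part of this patch:

import akka.actor.ActorSystem
import com.dimafeng.testcontainers.{ForAllTestContainer, PostgreSQLContainer}
import edu.ie3.simona.test.common.AgentSpec
import edu.ie3.simona.test.helper.TestContainerHelper
import org.scalatest.BeforeAndAfterAll

class MyServiceSqlIT
    extends AgentSpec(ActorSystem("MyServiceSqlIT"))
    with ForAllTestContainer
    with BeforeAndAfterAll
    with TestContainerHelper {

  // one postgres container is started for all tests in this spec
  override val container: PostgreSQLContainer = PostgreSQLContainer("postgres:14.2")

  override protected def beforeAll(): Unit = {
    // resolve the SQL fixtures relative to this class and copy them into the container
    container.copyFileToContainer(getMountableFile("timeseries/"), "/home/")
    // run the script via psql inside the container; stderr should stay empty
    val res = container.execInContainer("psql", "-Utest", "-f/home/time_series_p.sql")
    res.getStderr shouldBe empty
  }
}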
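Similarly, a small worked sketch (not part of the patch) of the WeightSum averaging introduced in WeatherSourceWrapper further up: weights are accumulated per attribute via add, and scale then divides the weighted weather data by the accumulated weights, falling back to EMPTY_WEATHER_DATA for attributes whose weight sum is approximately zero (using the !~= comparison from DoubleUtils). The numbers reuse the "varying weight components" case from WeatherSourceWrapperSpec:

import edu.ie3.datamodel.models.StandardUnits
import edu.ie3.simona.ontology.messages.services.WeatherMessage.WeatherData
import edu.ie3.simona.service.weather.WeatherSourceWrapper.WeightSum
import tech.units.indriya.quantity.Quantities
import tech.units.indriya.unit.Units

// accumulate the per-attribute weights of all contributing coordinates
val weightSum = WeightSum.EMPTY_WEIGHT_SUM.add(0.25, 0.5, 0.8, 1.0)

// weather data that has already been weighted and summed over the coordinates
val weighted = WeatherData(
  Quantities.getQuantity(1.0, StandardUnits.SOLAR_IRRADIANCE),
  Quantities.getQuantity(1.0, StandardUnits.SOLAR_IRRADIANCE),
  Quantities.getQuantity(1.0, Units.KELVIN),
  Quantities.getQuantity(1.0, StandardUnits.WIND_VELOCITY)
)

// dividing by the weight sums yields 4.0 and 2.0 W/m², 1.25 K and 1.0 m/s
val averaged = weightSum.scale(weighted)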