From d8a4c2343c9ded6c2ae8a11dd93024dcd740c92e Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 11 Mar 2024 13:59:16 +0100
Subject: [PATCH 01/36] First file structure for the Markov agent
---
.../participant/markov/MarkovAgent.scala | 133 +++++
.../markov/MarkovAgentFundamentals.scala | 561 ++++++++++++++++++
2 files changed, 694 insertions(+)
create mode 100644 src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
create mode 100644 src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
new file mode 100644
index 0000000000..5d6be0911d
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
@@ -0,0 +1,133 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.agent.participant.markov
+
+import edu.ie3.datamodel.models.input.system.MarkovInput
+import edu.ie3.simona.agent.participant.ParticipantAgent
+import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
+import edu.ie3.simona.agent.participant.markov.MarkovAgentFundamentals.{
+ FixedMarkovAgentFundamentals,
+ ProfileMarkovAgentFundamentals,
+ RandomMarkovAgentFundamentals,
+}
+import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
+import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
+import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
+import edu.ie3.simona.model.participant.CalcRelevantData.MarkovRelevantData
+import edu.ie3.simona.model.participant.ModelState.ConstantState
+import edu.ie3.simona.model.participant.markov.profile.ProfileMarkovModel
+import edu.ie3.simona.model.participant.markov.profile.ProfileMarkovModel.ProfileRelevantData
+import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel
+import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel.RandomRelevantData
+import edu.ie3.simona.model.participant.markov.{
+ FixedMarkovModel,
+ MarkovModel,
+ MarkovModelBehaviour,
+}
+import org.apache.pekko.actor.{ActorRef, Props}
+
+object MarkovAgent {
+ def props(
+ scheduler: ActorRef,
+ initStateData: ParticipantInitializeStateData[
+ MarkovInput,
+ MarkovRuntimeConfig,
+ ApparentPower,
+ ],
+ listener: Iterable[ActorRef],
+ ): Props =
+ MarkovModelBehaviour(initStateData.modelConfig.modelBehaviour) match {
+ case MarkovModelBehaviour.FIX =>
+ Props(new FixedMarkovAgent(scheduler, initStateData, listener))
+ case MarkovModelBehaviour.PROFILE =>
+ Props(new ProfileMarkovAgent(scheduler, initStateData, listener))
+ case MarkovModelBehaviour.RANDOM =>
+ Props(new RandomMarkovAgent(scheduler, initStateData, listener))
+ case unsupported =>
+ throw new IllegalArgumentException(
+ s"The markov agent behaviour '$unsupported' is currently not supported."
+ )
+ }
+
+ final class FixedMarkovAgent(
+ scheduler: ActorRef,
+ initStateData: ParticipantInitializeStateData[
+ MarkovInput,
+ MarkovRuntimeConfig,
+ ApparentPower,
+ ],
+ override val listener: Iterable[ActorRef],
+ ) extends MarkovAgent[
+ FixedMarkovModel.FixedMarkovRelevantData.type,
+ FixedMarkovModel,
+ ](scheduler, initStateData, listener)
+ with FixedMarkovAgentFundamentals
+
+ final class ProfileMarkovAgent(
+ scheduler: ActorRef,
+ initStateData: ParticipantInitializeStateData[
+ MarkovInput,
+ MarkovRuntimeConfig,
+ ApparentPower,
+ ],
+ override val listener: Iterable[ActorRef],
+ ) extends MarkovAgent[
+ ProfileRelevantData,
+ ProfileMarkovModel,
+ ](scheduler, initStateData, listener)
+ with ProfileMarkovAgentFundamentals
+
+ final class RandomMarkovAgent(
+ scheduler: ActorRef,
+ initStateData: ParticipantInitializeStateData[
+ MarkovInput,
+ MarkovRuntimeConfig,
+ ApparentPower,
+ ],
+ override val listener: Iterable[ActorRef],
+ ) extends MarkovAgent[
+ RandomRelevantData,
+ RandomMarkovModel,
+ ](scheduler, initStateData, listener)
+ with RandomMarkovAgentFundamentals
+}
+
+/** Creating a Markov agent
+ *
+ * @param scheduler
+ * Actor reference of the scheduler
+ * @param listener
+ * List of listeners interested in results
+ */
+abstract class MarkovAgent[MD <: MarkovRelevantData, MM <: MarkovModel[MD]](
+ scheduler: ActorRef,
+ initStateData: ParticipantInitializeStateData[
+ MarkovInput,
+ MarkovRuntimeConfig,
+ ApparentPower,
+ ],
+ override val listener: Iterable[ActorRef],
+) extends ParticipantAgent[
+ ApparentPower,
+ MD,
+ ConstantState.type,
+ ParticipantStateData[ApparentPower],
+ MarkovInput,
+ MarkovRuntimeConfig,
+ MM,
+ ](scheduler, initStateData)
+ with MarkovAgentFundamentals[MD, MM] {
+ /*
+ * "Hey, SIMONA! What is handled in ParticipantAgent?"
+ * "Hey, dude! The following things are handled in ParticipantAgent:
+ * 1) Initialization of Agent
+ * 2) Event reactions in Idle state
+ * 3) Handling of incoming information
+ * 4) Performing model calculations
+ * "
+ */
+}
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
new file mode 100644
index 0000000000..2cf4fbaf7e
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
@@ -0,0 +1,561 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.agent.participant.markov
+
+import edu.ie3.datamodel.models.input.system.LoadInput
+import edu.ie3.datamodel.models.result.system.{
+ LoadResult,
+ SystemParticipantResult,
+}
+import edu.ie3.simona.agent.ValueStore
+import edu.ie3.simona.agent.participant.ParticipantAgent.getAndCheckNodalVoltage
+import edu.ie3.simona.agent.participant.ParticipantAgentFundamentals
+import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{
+ ApparentPower,
+ ZERO_POWER,
+}
+import edu.ie3.simona.agent.participant.data.Data.SecondaryData
+import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
+import edu.ie3.simona.agent.participant.statedata.BaseStateData.{
+ FlexControlledData,
+ ParticipantModelBaseStateData,
+}
+import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
+import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.InputModelContainer
+import edu.ie3.simona.agent.state.AgentState
+import edu.ie3.simona.agent.state.AgentState.Idle
+import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
+import edu.ie3.simona.event.notifier.NotifierConfig
+import edu.ie3.simona.exceptions.agent.InconsistentStateException
+import edu.ie3.simona.model.SystemComponent
+import edu.ie3.simona.model.participant.CalcRelevantData.LoadRelevantData
+import edu.ie3.simona.model.participant.ModelState.ConstantState
+import edu.ie3.simona.model.participant.load.FixedLoadModel.FixedLoadRelevantData
+import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel.ProfileRelevantData
+import edu.ie3.simona.model.participant.load.profile.{
+ LoadProfileStore,
+ ProfileLoadModel,
+}
+import edu.ie3.simona.model.participant.load.random.RandomLoadModel.RandomRelevantData
+import edu.ie3.simona.model.participant.load.random.{
+ RandomLoadModel,
+ RandomLoadParamStore,
+}
+import edu.ie3.simona.model.participant.load.{
+ FixedLoadModel,
+ LoadModel,
+ LoadReference,
+}
+import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
+import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{
+ FlexRequest,
+ FlexResponse,
+}
+import edu.ie3.simona.util.SimonaConstants
+import edu.ie3.simona.util.TickUtil._
+import edu.ie3.util.quantities.PowerSystemUnits.PU
+import edu.ie3.util.quantities.QuantityUtils.RichQuantityDouble
+import edu.ie3.util.scala.OperationInterval
+import edu.ie3.util.scala.quantities.ReactivePower
+import org.apache.pekko.actor.typed.scaladsl.adapter.ClassicActorRefOps
+import org.apache.pekko.actor.typed.{ActorRef => TypedActorRef}
+import org.apache.pekko.actor.{ActorRef, FSM}
+import squants.{Dimensionless, Each, Power}
+
+import java.time.ZonedDateTime
+import java.util.UUID
+import scala.collection.SortedSet
+import scala.reflect.{ClassTag, classTag}
+
+protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovModel[
+ LD
+]] extends ParticipantAgentFundamentals[
+ ApparentPower,
+ LD,
+ ConstantState.type,
+ ParticipantStateData[ApparentPower],
+ MarkovInput,
+ MarkovRuntimeConfig,
+ LM,
+ ] {
+ this: MarkovAgent[LD, LM] =>
+ override protected val pdClassTag: ClassTag[ApparentPower] =
+ classTag[ApparentPower]
+ override val alternativeResult: ApparentPower = ZERO_POWER
+
+ /** Determines the needed base state data in dependence of the foreseen
+ * simulation mode of the agent.
+ *
+ * @param inputModel
+ * Input model definition
+ * @param modelConfig
+ * Configuration of the model
+ * @param services
+ * Collection of services to register with
+ * @param simulationStartDate
+    *   Real world date and time at which the simulation starts
+    * @param simulationEndDate
+    *   Real world date and time at which the simulation ends
+    * @param resolution
+    *   The agent's regular time bin in which it wants to be triggered, e.g. one hour
+    * @param requestVoltageDeviationThreshold
+    *   Threshold above which two nodal voltage magnitudes from participant
+    *   power requests for the same tick are considered to be different
+ * @param outputConfig
+ * Config of the output behaviour for simulation results
+ * @return
+ * A child of [[ParticipantModelBaseStateData]] that reflects the behaviour
+ * based on the data source definition
+ */
+ override def determineModelBaseStateData(
+ inputModel: InputModelContainer[LoadInput],
+ modelConfig: LoadRuntimeConfig,
+ services: Iterable[SecondaryDataService[_ <: SecondaryData]],
+ simulationStartDate: ZonedDateTime,
+ simulationEndDate: ZonedDateTime,
+ resolution: Long,
+ requestVoltageDeviationThreshold: Double,
+ outputConfig: NotifierConfig,
+ maybeEmAgent: Option[TypedActorRef[FlexResponse]],
+ ): ParticipantModelBaseStateData[
+ ApparentPower,
+ LD,
+ ConstantState.type,
+ LM,
+ ] = {
+ /* Build the calculation model */
+ val model =
+ buildModel(
+ inputModel,
+ modelConfig,
+ simulationStartDate,
+ simulationEndDate,
+ )
+
+ /* Go and collect all ticks, in which activation is needed in addition to the activations made by incoming data.
+ * Also register for services, where needed. */
+ val lastTickInSimulation = simulationEndDate.toTick(simulationStartDate)
+ val additionalActivationTicks = model match {
+ /* If no secondary data is needed (implicitly by fixed load model), add activation ticks for the simple model */
+ case fixedLoadModel: FixedLoadModel =>
+ /* As participant agents always return their last known operation point on request, it is sufficient
+         * to let a fixed load model determine its operation point on:
+ * 1) The first tick of the simulation
+ * 2) The tick, it turns on (in time dependent operation)
+ * 3) The tick, it turns off (in time dependent operation)
+ * Coinciding ticks are summarized and the last tick is removed, as the change in operation status
+ * doesn't affect anything then */
+ SortedSet[Long](
+ SimonaConstants.FIRST_TICK_IN_SIMULATION,
+ fixedLoadModel.operationInterval.start,
+ fixedLoadModel.operationInterval.end,
+ ).filterNot(_ == lastTickInSimulation)
+ case profileLoadModel: ProfileLoadModel =>
+ activationTicksInOperationTime(
+ simulationStartDate,
+ LoadProfileStore.resolution.getSeconds,
+ profileLoadModel.operationInterval.start,
+ profileLoadModel.operationInterval.end,
+ )
+ case randomLoadModel: RandomLoadModel =>
+ activationTicksInOperationTime(
+ simulationStartDate,
+ RandomLoadParamStore.resolution.getSeconds,
+ randomLoadModel.operationInterval.start,
+ randomLoadModel.operationInterval.end,
+ )
+ case _ =>
+ SortedSet.empty[Long]
+ }
+
+ ParticipantModelBaseStateData[ApparentPower, LD, ConstantState.type, LM](
+ simulationStartDate,
+ simulationEndDate,
+ model,
+ services,
+ outputConfig,
+ additionalActivationTicks,
+ Map.empty,
+ requestVoltageDeviationThreshold,
+ ValueStore.forVoltage(
+ resolution,
+ Each(
+ inputModel.electricalInputModel.getNode
+ .getvTarget()
+ .to(PU)
+ .getValue
+ .doubleValue
+ ),
+ ),
+ ValueStore(resolution),
+ ValueStore(resolution),
+ ValueStore(resolution),
+ ValueStore(resolution),
+ maybeEmAgent.map(FlexControlledData(_, self.toTyped[FlexRequest])),
+ )
+ }
+
+ override def buildModel(
+ inputModel: InputModelContainer[LoadInput],
+ modelConfig: LoadRuntimeConfig,
+ simulationStartDate: ZonedDateTime,
+ simulationEndDate: ZonedDateTime,
+ ): LM = {
+ val operationInterval: OperationInterval =
+ SystemComponent.determineOperationInterval(
+ simulationStartDate,
+ simulationEndDate,
+ inputModel.electricalInputModel.getOperationTime,
+ )
+ val reference = LoadReference(inputModel.electricalInputModel, modelConfig)
+ buildModel(
+ inputModel.electricalInputModel,
+ operationInterval,
+ modelConfig,
+ reference,
+ )
+ }
+
+ protected def buildModel(
+ inputModel: LoadInput,
+ operationInterval: OperationInterval,
+ modelConfig: LoadRuntimeConfig,
+ reference: LoadReference,
+ ): LM
+
+ override protected def createInitialState(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ LD,
+ ConstantState.type,
+ LM,
+ ]
+ ): ModelState.ConstantState.type = ConstantState
+
+ /** Handle an active power change by flex control.
+ * @param tick
+ * Tick, in which control is issued
+ * @param baseStateData
+ * Base state data of the agent
+ * @param data
+ * Calculation relevant data
+ * @param lastState
+ * Last known model state
+ * @param setPower
+ * Setpoint active power
+ * @return
+ * Updated model state, a result model and a [[FlexChangeIndicator]]
+ */
+ def handleControlledPowerChange(
+ tick: Long,
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ LD,
+ ConstantState.type,
+ LM,
+ ],
+ data: LD,
+ lastState: ConstantState.type,
+ setPower: squants.Power,
+ ): (ConstantState.type, ApparentPower, FlexChangeIndicator) = {
+ /* Calculate result */
+ val voltage = getAndCheckNodalVoltage(baseStateData, tick)
+
+ val reactivePower = baseStateData.model.calculateReactivePower(
+ setPower,
+ voltage,
+ )
+ val result = ApparentPower(setPower, reactivePower)
+
+ /* Handle the request within the model */
+ val (updatedState, flexChangeIndicator) =
+ baseStateData.model.handleControlledPowerChange(data, lastState, setPower)
+ (updatedState, result, flexChangeIndicator)
+ }
+
+ /** Calculate the power output of the participant utilising secondary data.
+    * However, it may well be that not the complete set of secondary data is
+    * available for the given tick. This is especially likely if the actor
+    * has been activated additionally. This method therefore has to fill up
+    * missing data with the last known data, as this is still supposed to be
+    * valid. The secondary data is then put into the calculation relevant
+    * data store. The next state is [[Idle]], sending a
+    * [[edu.ie3.simona.ontology.messages.SchedulerMessage.Completion]] to the
+    * scheduler and using the updated result values.
+ *
+ * @param baseStateData
+ * The base state data with collected secondary data
+    * @param lastModelState
+    *   Last known model state
+ * @param currentTick
+ * Tick, the trigger belongs to
+ * @param scheduler
+ * [[ActorRef]] to the scheduler in the simulation
+ * @return
+ * [[Idle]] with updated result values
+ */
+ override def calculatePowerWithSecondaryDataAndGoToIdle(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ LD,
+ ConstantState.type,
+ LM,
+ ],
+ lastModelState: ConstantState.type,
+ currentTick: Long,
+ scheduler: ActorRef,
+ ): FSM.State[AgentState, ParticipantStateData[ApparentPower]] =
+ throw new InconsistentStateException(
+ s"Load model is not able to calculate power with secondary data."
+ )
+
+ /** Determine the average result within the given tick window
+ *
+ * @param tickToResults
+ * Mapping from data tick to actual data
+ * @param windowStart
+    *   First included tick of the time window
+    * @param windowEnd
+    *   Last included tick of the time window
+ * @param activeToReactivePowerFuncOpt
+ * An Option on a function, that transfers the active into reactive power
+ * @return
+ * The averaged result
+    *   The averaged result
+    */
+ override def averageResults(
+ tickToResults: Map[Long, ApparentPower],
+ windowStart: Long,
+ windowEnd: Long,
+ activeToReactivePowerFuncOpt: Option[
+ Power => ReactivePower
+ ] = None,
+ ): ApparentPower =
+ ParticipantAgentFundamentals.averageApparentPower(
+ tickToResults,
+ windowStart,
+ windowEnd,
+ activeToReactivePowerFuncOpt,
+ log,
+ )
+
+ /** Determines the correct result.
+ *
+ * @param uuid
+ * Unique identifier of the physical model
+ * @param dateTime
+ * Real world date of the result
+ * @param result
+ * The primary data to build a result model for
+ * @return
+ * The equivalent event
+ */
+ override protected def buildResult(
+ uuid: UUID,
+ dateTime: ZonedDateTime,
+ result: ApparentPower,
+ ): SystemParticipantResult =
+ new LoadResult(
+ dateTime,
+ uuid,
+ result.p.toMegawatts.asMegaWatt,
+ result.q.toMegavars.asMegaVar,
+ )
+
+ override protected def updateState(
+ tick: Long,
+ modelState: ModelState.ConstantState.type,
+ calcRelevantData: LD,
+ nodalVoltage: squants.Dimensionless,
+ model: LM,
+ ): ModelState.ConstantState.type = modelState
+}
+
+object LoadAgentFundamentals {
+ trait FixedLoadAgentFundamentals
+ extends LoadAgentFundamentals[
+ FixedLoadModel.FixedLoadRelevantData.type,
+ FixedLoadModel,
+ ] {
+ this: MarkovAgent.FixedLoadAgent =>
+
+ override def buildModel(
+ inputModel: LoadInput,
+ operationInterval: OperationInterval,
+ modelConfig: LoadRuntimeConfig,
+ reference: LoadReference,
+ ): FixedLoadModel =
+ FixedLoadModel(
+ inputModel,
+ modelConfig.scaling,
+ operationInterval,
+ reference,
+ )
+
+ override protected def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ FixedLoadRelevantData.type,
+ ConstantState.type,
+ FixedLoadModel,
+ ],
+ tick: Long,
+ ): FixedLoadRelevantData.type =
+ FixedLoadRelevantData
+
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ override val calculateModelPowerFunc: (
+ Long,
+ ParticipantModelBaseStateData[
+ ApparentPower,
+ FixedLoadRelevantData.type,
+ ConstantState.type,
+ FixedLoadModel,
+ ],
+ ConstantState.type,
+ Dimensionless,
+ ) => ApparentPower = (
+ tick: Long,
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ FixedLoadRelevantData.type,
+ ConstantState.type,
+ FixedLoadModel,
+ ],
+ state: ConstantState.type,
+ voltage: Dimensionless,
+ ) =>
+ baseStateData.model.calculatePower(
+ tick,
+ voltage,
+ state,
+ FixedLoadRelevantData,
+ )
+ }
+
+ trait ProfileLoadAgentFundamentals
+ extends LoadAgentFundamentals[
+ ProfileRelevantData,
+ ProfileLoadModel,
+ ] {
+ this: MarkovAgent.ProfileLoadAgent =>
+
+ override def buildModel(
+ inputModel: LoadInput,
+ operationInterval: OperationInterval,
+ modelConfig: LoadRuntimeConfig,
+ reference: LoadReference,
+ ): ProfileLoadModel =
+ ProfileLoadModel(
+ inputModel,
+ operationInterval,
+ modelConfig.scaling,
+ reference,
+ )
+
+ override protected def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ ProfileRelevantData,
+ ConstantState.type,
+ ProfileLoadModel,
+ ],
+ currentTick: Long,
+ ): ProfileRelevantData =
+ ProfileRelevantData(
+ currentTick.toDateTime(baseStateData.startDate)
+ )
+
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ override val calculateModelPowerFunc: (
+ Long,
+ ParticipantModelBaseStateData[
+ ApparentPower,
+ ProfileRelevantData,
+ ConstantState.type,
+ ProfileLoadModel,
+ ],
+ ConstantState.type,
+ Dimensionless,
+ ) => ApparentPower = (tick, baseStateData, _, voltage) => {
+ val profileRelevantData =
+ createCalcRelevantData(baseStateData, tick)
+
+ baseStateData.model.calculatePower(
+        tick,
+ voltage,
+ ConstantState,
+ profileRelevantData,
+ )
+ }
+ }
+
+ trait RandomLoadAgentFundamentals
+ extends LoadAgentFundamentals[
+ RandomRelevantData,
+ RandomLoadModel,
+ ] {
+ this: MarkovAgent.RandomLoadAgent =>
+
+ override def buildModel(
+ inputModel: LoadInput,
+ operationInterval: OperationInterval,
+ modelConfig: LoadRuntimeConfig,
+ reference: LoadReference,
+ ): RandomLoadModel =
+ RandomLoadModel(
+ inputModel,
+ operationInterval,
+ modelConfig.scaling,
+ reference,
+ )
+
+ override protected def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ RandomRelevantData,
+ ConstantState.type,
+ RandomLoadModel,
+ ],
+ tick: Long,
+ ): RandomRelevantData =
+ RandomRelevantData(
+ tick.toDateTime(baseStateData.startDate)
+ )
+
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ override val calculateModelPowerFunc: (
+ Long,
+ ParticipantModelBaseStateData[
+ ApparentPower,
+ RandomRelevantData,
+ ConstantState.type,
+ RandomLoadModel,
+ ],
+ ConstantState.type,
+ Dimensionless,
+ ) => ApparentPower = (tick, baseStateData, _, voltage) => {
+ val profileRelevantData =
+ createCalcRelevantData(baseStateData, tick)
+
+ baseStateData.model.calculatePower(
+        tick,
+ voltage,
+ ConstantState,
+ profileRelevantData,
+ )
+ }
+ }
+}
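
MarkovModelBehaviour is referenced by MarkovAgent.props above but is not defined anywhere in this patch series. A minimal sketch of what the dispatch assumes, modelled on the load agent's behaviour enumeration; the token values and the String overload of apply are assumptions, not part of these patches:

    object MarkovModelBehaviour extends Enumeration {
      val FIX, PROFILE, RANDOM = Value

      // Parses the behaviour token from MarkovRuntimeConfig.modelBehaviour,
      // assumed to be one of "fix", "profile" or "random" (case-insensitive).
      def apply(token: String): Value = token.trim.toLowerCase match {
        case "fix"     => FIX
        case "profile" => PROFILE
        case "random"  => RANDOM
        case other =>
          throw new IllegalArgumentException(
            s"Unknown Markov model behaviour '$other'."
          )
      }
    }

With such an enumeration in place, props resolves the configured behaviour string to one of the concrete agent classes defined in this patch.
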
From 011babf037e6d4725a28b84a0b550108bf4f8df7 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 13 Mar 2024 12:19:44 +0100
Subject: [PATCH 02/36] Test LFS
---
.../edu/ie3/simona/agent/participant/markov/MarkovAgent.scala | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
index 5d6be0911d..771ce1dcff 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
@@ -1,5 +1,5 @@
/*
- * © 2020. TU Dortmund University,
+ * © 2024. TU Dortmund University,
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
From 1f71034d38d2b30144558c5d8052edec9796c862 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 13 Mar 2024 13:52:39 +0100
Subject: [PATCH 03/36] Add Markov CSV resources
---
.../markov/appliances/average_hh.csv | 2 ++
.../resources/markov/appliances/by_income.csv | 9 +++++++
.../markov/appliances/by_inhabitants.csv | 6 +++++
.../resources/markov/appliances/by_type.csv | 3 +++
.../resources/markov/appliances/load_ts.csv | 25 ++++++++++++++++++
.../resources/markov/appliances/load_ts.xlsx | Bin 0 -> 11666 bytes
src/main/resources/markov/config.yaml | 5 ++++
.../switch_on_probabilities/dish_washer.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/dryer.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/freezer.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/fridge.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/lighting.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/pc.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/stove.csv | 25 ++++++++++++++++++
.../telecommunication.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/television.csv | 25 ++++++++++++++++++
.../video_recorder.csv | 25 ++++++++++++++++++
.../washing_machine.csv | 25 ++++++++++++++++++
.../switch_on_probabilities/water_heating.csv | 25 ++++++++++++++++++
.../usage_probabilities.csv | 14 ++++++++++
20 files changed, 364 insertions(+)
create mode 100644 src/main/resources/markov/appliances/average_hh.csv
create mode 100644 src/main/resources/markov/appliances/by_income.csv
create mode 100644 src/main/resources/markov/appliances/by_inhabitants.csv
create mode 100644 src/main/resources/markov/appliances/by_type.csv
create mode 100644 src/main/resources/markov/appliances/load_ts.csv
create mode 100644 src/main/resources/markov/appliances/load_ts.xlsx
create mode 100644 src/main/resources/markov/config.yaml
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/dish_washer.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/dryer.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/freezer.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/fridge.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/lighting.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/pc.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/stove.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/telecommunication.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/television.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/video_recorder.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/washing_machine.csv
create mode 100644 src/main/resources/markov/probabilities/switch_on_probabilities/water_heating.csv
create mode 100644 src/main/resources/markov/probabilities/usage_probabilities/usage_probabilities.csv
diff --git a/src/main/resources/markov/appliances/average_hh.csv b/src/main/resources/markov/appliances/average_hh.csv
new file mode 100644
index 0000000000..48a215957c
--- /dev/null
+++ b/src/main/resources/markov/appliances/average_hh.csv
@@ -0,0 +1,2 @@
+washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+0,972;0,394;0,686;0,984;1,219;0,561;1,58;0,9;1,649;2,963;2,5;0,3;1
\ No newline at end of file
diff --git a/src/main/resources/markov/appliances/by_income.csv b/src/main/resources/markov/appliances/by_income.csv
new file mode 100644
index 0000000000..1a45788189
--- /dev/null
+++ b/src/main/resources/markov/appliances/by_income.csv
@@ -0,0 +1,9 @@
+income;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+below 900;0,835;0,154;0,306;0,885;1,024;0,286;1,05;0,559;0,953;1,807;1;0,1;1,3
+from 900 to 1300;0,924;0,219;0,462;0,926;1,059;0,388;1,232;0,637;1,038;2,093;1,2;0,1;1,4
+from 1300 to 1500;0,946;0,269;0,555;0,944;1,099;0,456;1,349;0,721;1,166;2,302;1,8;0,1;1,5
+from 1500 to 2000;0,964;0,33;0,645;0,963;1,14;0,515;1,486;0,83;1,352;2,574;2;0,2;1,6
+from 2000 to 2600;0,996;0,444;0,77;0,998;1,238;0,635;1,665;0,949;1,656;3,082;2,3;0,2;1,8
+from 2600 to 3600;1,02;0,53;0,875;1,03;1,317;0,691;1,871;1,105;2,095;3,644;2,8;0,3;2
+from 3600 to 5000;1,041;0,616;0,954;1,068;1,447;0,751;2,03;1,221;2,499;4,177;3;0,3;2,3
+from 5000 to 18000;1,075;0,694;1,009;1,099;1,59;0,82;2,15;1,335;3,04;4,708;3,2;0,3;2,8
\ No newline at end of file
diff --git a/src/main/resources/markov/appliances/by_inhabitants.csv b/src/main/resources/markov/appliances/by_inhabitants.csv
new file mode 100644
index 0000000000..684033d37c
--- /dev/null
+++ b/src/main/resources/markov/appliances/by_inhabitants.csv
@@ -0,0 +1,6 @@
+inhabitants;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+1;0,894;0,223;0,459;0,927;1,055;0,346;1,166;0,645;1,021;1,935;1;0,097;1
+2;1,007;0,431;0,772;1,004;1,282;0,661;1,703;0,923;1,656;3,096;2;0,153;1,5
+3;1,032;0,556;0,894;1,036;1,356;0,711;2,034;1,218;2,451;4,063;2,333;0,208;2
+4;1,05;0,661;0,961;1,052;1,416;0,796;2,099;1,322;2,743;4,601;2,833;0,25;2,5
+5;1,098;0,732;0,988;1,079;1,494;0,904;2,155;1,362;3,133;5,312;3;0,292;3,5
\ No newline at end of file
diff --git a/src/main/resources/markov/appliances/by_type.csv b/src/main/resources/markov/appliances/by_type.csv
new file mode 100644
index 0000000000..c4d394b95c
--- /dev/null
+++ b/src/main/resources/markov/appliances/by_type.csv
@@ -0,0 +1,3 @@
+type;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+flat;0,926;0,269;0,545;0,94;1,088;0,368;1,354;0,807;1,453;2,535;1,5;0,1;1
+house;1,032;0,561;0,873;1,043;1,393;0,817;1,88;1,023;1,91;3,53;2,5;0,3;1,5
\ No newline at end of file
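
The four appliance tables above (average_hh.csv, by_income.csv, by_inhabitants.csv, by_type.csv) are semicolon-separated and use a decimal comma, so their values cannot be fed to a plain toDouble. A minimal reading sketch under those assumptions; the method name is illustrative only and not part of this patch:

    import scala.io.Source

    // Reads a two-line table such as markov/appliances/average_hh.csv into
    // column name -> value, converting the decimal comma to a decimal point.
    // The by_income/by_inhabitants/by_type variants carry an additional
    // leading key column that would have to be dropped first.
    def readAverageHousehold(path: String): Map[String, Double] = {
      val src = Source.fromFile(path)
      try {
        val lines = src.getLines().toList
        val header = lines.head.split(";")
        val row = lines(1).split(";")
        header.zip(row).map { case (column, value) =>
          column -> value.replace(',', '.').toDouble
        }.toMap
      } finally src.close()
    }
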
diff --git a/src/main/resources/markov/appliances/load_ts.csv b/src/main/resources/markov/appliances/load_ts.csv
new file mode 100644
index 0000000000..7c92a5fc05
--- /dev/null
+++ b/src/main/resources/markov/appliances/load_ts.csv
@@ -0,0 +1,25 @@
+washing_machine,dish_washer,dryer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+100,80,2000,700,125,130,150,30,130,40,60,18000,55
+2000,2000,2000,700,1,1,150,30,130,40,60,,
+900,80,2000,700,1,1,150,30,130,40,60,,
+100,80,1600,700,125,130,150,30,130,40,60,,
+100,80,1300,,1,1,150,30,130,40,,,
+300,2000,940,,1,1,150,30,130,40,,,
+50,300,,,125,130,,30,130,40,,,
+,150,,,1,1,,30,130,40,,,
+,,,,1,1,,,,40,,,
+,,,,125,130,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,125,130,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,125,130,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,125,130,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,125,130,,,,40,,,
+,,,,1,1,,,,40,,,
+,,,,1,1,,,,40,,,
diff --git a/src/main/resources/markov/appliances/load_ts.xlsx b/src/main/resources/markov/appliances/load_ts.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..c5b5696f63b2ca4b06513296f5bf76f8b3db2a95
GIT binary patch
literal 11666
[base85 binary payload for load_ts.xlsx omitted]

From: pierrepetersmeier
Date: Wed, 13 Mar 2024 14:01:01 +0100
Subject: [PATCH 04/36] Use LoadAgent as base for MarkovAgent
---
.../markov/MarkovAgentFundamentals.scala | 176 +++++++++---------
1 file changed, 90 insertions(+), 86 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
index 2cf4fbaf7e..e4bb127c8b 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
@@ -1,14 +1,14 @@
/*
- * © 2020. TU Dortmund University,
+ * © 2024. TU Dortmund University,
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
package edu.ie3.simona.agent.participant.markov
-import edu.ie3.datamodel.models.input.system.LoadInput
+import edu.ie3.datamodel.models.input.system.MarkovInput
import edu.ie3.datamodel.models.result.system.{
- LoadResult,
+ MarkovResult,
SystemParticipantResult,
}
import edu.ie3.simona.agent.ValueStore
@@ -28,27 +28,27 @@ import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.InputModelContainer
import edu.ie3.simona.agent.state.AgentState
import edu.ie3.simona.agent.state.AgentState.Idle
-import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
+import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.exceptions.agent.InconsistentStateException
import edu.ie3.simona.model.SystemComponent
-import edu.ie3.simona.model.participant.CalcRelevantData.LoadRelevantData
+import edu.ie3.simona.model.participant.CalcRelevantData.MarkovRelevantData
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.FixedLoadModel.FixedLoadRelevantData
-import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel.ProfileRelevantData
-import edu.ie3.simona.model.participant.load.profile.{
- LoadProfileStore,
- ProfileLoadModel,
+import edu.ie3.simona.model.participant.Markov.FixedMarkovModel.FixedMarkovRelevantData
+import edu.ie3.simona.model.participant.Markov.profile.ProfileMarkovModel.ProfileRelevantData
+import edu.ie3.simona.model.participant.Markov.profile.{
+ MarkovProfileStore,
+ ProfileMarkovModel,
}
-import edu.ie3.simona.model.participant.load.random.RandomLoadModel.RandomRelevantData
-import edu.ie3.simona.model.participant.load.random.{
- RandomLoadModel,
- RandomLoadParamStore,
+import edu.ie3.simona.model.participant.Markov.random.RandomMarkovModel.RandomRelevantData
+import edu.ie3.simona.model.participant.Markov.random.{
+ RandomMarkovModel,
+ RandomMarkovParamStore,
}
-import edu.ie3.simona.model.participant.load.{
- FixedLoadModel,
- LoadModel,
- LoadReference,
+import edu.ie3.simona.model.participant.Markov.{
+ FixedMarkovModel,
+ MarkovModel,
+ MarkovReference,
}
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{
@@ -71,9 +71,12 @@ import java.util.UUID
import scala.collection.SortedSet
import scala.reflect.{ClassTag, classTag}
-protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovModel[
- LD
-]] extends ParticipantAgentFundamentals[
+protected trait MarkovAgentFundamentals[
+ LD <: MarkovRelevantData,
+ LM <: MarkovModel[
+ LD
+ ],
+] extends ParticipantAgentFundamentals[
ApparentPower,
LD,
ConstantState.type,
@@ -112,8 +115,8 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
* based on the data source definition
*/
override def determineModelBaseStateData(
- inputModel: InputModelContainer[LoadInput],
- modelConfig: LoadRuntimeConfig,
+ inputModel: InputModelContainer[MarkovInput],
+ modelConfig: MarkovRuntimeConfig,
services: Iterable[SecondaryDataService[_ <: SecondaryData]],
simulationStartDate: ZonedDateTime,
simulationEndDate: ZonedDateTime,
@@ -140,10 +143,10 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
* Also register for services, where needed. */
val lastTickInSimulation = simulationEndDate.toTick(simulationStartDate)
val additionalActivationTicks = model match {
- /* If no secondary data is needed (implicitly by fixed load model), add activation ticks for the simple model */
- case fixedLoadModel: FixedLoadModel =>
+ /* If no secondary data is needed (implicitly by fixed Markov model), add activation ticks for the simple model */
+ case fixedMarkovModel: FixedMarkovModel =>
/* As participant agents always return their last known operation point on request, it is sufficient
-       * to let a fixed load model determine its operation point on:
+       * to let a fixed Markov model determine its operation point on:
* 1) The first tick of the simulation
* 2) The tick, it turns on (in time dependent operation)
* 3) The tick, it turns off (in time dependent operation)
@@ -151,22 +154,22 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
* doesn't affect anything then */
SortedSet[Long](
SimonaConstants.FIRST_TICK_IN_SIMULATION,
- fixedLoadModel.operationInterval.start,
- fixedLoadModel.operationInterval.end,
+ fixedMarkovModel.operationInterval.start,
+ fixedMarkovModel.operationInterval.end,
).filterNot(_ == lastTickInSimulation)
- case profileLoadModel: ProfileLoadModel =>
+ case profileMarkovModel: ProfileMarkovModel =>
activationTicksInOperationTime(
simulationStartDate,
- LoadProfileStore.resolution.getSeconds,
- profileLoadModel.operationInterval.start,
- profileLoadModel.operationInterval.end,
+ MarkovProfileStore.resolution.getSeconds,
+ profileMarkovModel.operationInterval.start,
+ profileMarkovModel.operationInterval.end,
)
- case randomLoadModel: RandomLoadModel =>
+ case randomMarkovModel: RandomMarkovModel =>
activationTicksInOperationTime(
simulationStartDate,
- RandomLoadParamStore.resolution.getSeconds,
- randomLoadModel.operationInterval.start,
- randomLoadModel.operationInterval.end,
+ RandomMarkovParamStore.resolution.getSeconds,
+ randomMarkovModel.operationInterval.start,
+ randomMarkovModel.operationInterval.end,
)
case _ =>
SortedSet.empty[Long]
@@ -200,8 +203,8 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
}
override def buildModel(
- inputModel: InputModelContainer[LoadInput],
- modelConfig: LoadRuntimeConfig,
+ inputModel: InputModelContainer[MarkovInput],
+ modelConfig: MarkovRuntimeConfig,
simulationStartDate: ZonedDateTime,
simulationEndDate: ZonedDateTime,
): LM = {
@@ -211,7 +214,8 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
simulationEndDate,
inputModel.electricalInputModel.getOperationTime,
)
- val reference = LoadReference(inputModel.electricalInputModel, modelConfig)
+ val reference =
+ MarkovReference(inputModel.electricalInputModel, modelConfig)
buildModel(
inputModel.electricalInputModel,
operationInterval,
@@ -221,10 +225,10 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
}
protected def buildModel(
- inputModel: LoadInput,
+ inputModel: MarkovInput,
operationInterval: OperationInterval,
- modelConfig: LoadRuntimeConfig,
- reference: LoadReference,
+ modelConfig: MarkovRuntimeConfig,
+ reference: MarkovReference,
): LM
override protected def createInitialState(
@@ -310,7 +314,7 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
scheduler: ActorRef,
): FSM.State[AgentState, ParticipantStateData[ApparentPower]] =
throw new InconsistentStateException(
- s"Load model is not able to calculate power with secondary data."
+ s"Markov model is not able to calculate power with secondary data."
)
/** Determine the average result within the given tick window
@@ -358,7 +362,7 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
dateTime: ZonedDateTime,
result: ApparentPower,
): SystemParticipantResult =
- new LoadResult(
+ new MarkovResult(
dateTime,
uuid,
result.p.toMegawatts.asMegaWatt,
@@ -374,21 +378,21 @@ protected trait MarkovAgentFundamentals[LD <: LoadRelevantData, LM <: MarkovMod
): ModelState.ConstantState.type = modelState
}
-object LoadAgentFundamentals {
- trait FixedLoadAgentFundamentals
- extends LoadAgentFundamentals[
- FixedLoadModel.FixedLoadRelevantData.type,
- FixedLoadModel,
+object MarkovAgentFundamentals {
+ trait FixedMarkovAgentFundamentals
+ extends MarkovAgentFundamentals[
+ FixedMarkovModel.FixedMarkovRelevantData.type,
+ FixedMarkovModel,
] {
- this: MarkovAgent.FixedLoadAgent =>
+ this: MarkovAgent.FixedMarkovAgent =>
override def buildModel(
- inputModel: LoadInput,
+ inputModel: MarkovInput,
operationInterval: OperationInterval,
- modelConfig: LoadRuntimeConfig,
- reference: LoadReference,
- ): FixedLoadModel =
- FixedLoadModel(
+ modelConfig: MarkovRuntimeConfig,
+ reference: MarkovReference,
+ ): FixedMarkovModel =
+ FixedMarkovModel(
inputModel,
modelConfig.scaling,
operationInterval,
@@ -398,13 +402,13 @@ object LoadAgentFundamentals {
override protected def createCalcRelevantData(
baseStateData: ParticipantModelBaseStateData[
ApparentPower,
- FixedLoadRelevantData.type,
+ FixedMarkovRelevantData.type,
ConstantState.type,
- FixedLoadModel,
+ FixedMarkovModel,
],
tick: Long,
- ): FixedLoadRelevantData.type =
- FixedLoadRelevantData
+ ): FixedMarkovRelevantData.type =
+ FixedMarkovRelevantData
/** Partial function, that is able to transfer
* [[ParticipantModelBaseStateData]] (holding the actual calculation model)
@@ -414,9 +418,9 @@ object LoadAgentFundamentals {
Long,
ParticipantModelBaseStateData[
ApparentPower,
- FixedLoadRelevantData.type,
+ FixedMarkovRelevantData.type,
ConstantState.type,
- FixedLoadModel,
+ FixedMarkovModel,
],
ConstantState.type,
Dimensionless,
@@ -424,9 +428,9 @@ object LoadAgentFundamentals {
tick: Long,
baseStateData: ParticipantModelBaseStateData[
ApparentPower,
- FixedLoadRelevantData.type,
+ FixedMarkovRelevantData.type,
ConstantState.type,
- FixedLoadModel,
+ FixedMarkovModel,
],
state: ConstantState.type,
voltage: Dimensionless,
@@ -435,24 +439,24 @@ object LoadAgentFundamentals {
tick,
voltage,
state,
- FixedLoadRelevantData,
+ FixedMarkovRelevantData,
)
}
- trait ProfileLoadAgentFundamentals
- extends LoadAgentFundamentals[
+ trait ProfileMarkovAgentFundamentals
+ extends MarkovAgentFundamentals[
ProfileRelevantData,
- ProfileLoadModel,
+ ProfileMarkovModel,
] {
- this: MarkovAgent.ProfileLoadAgent =>
+ this: MarkovAgent.ProfileMarkovAgent =>
override def buildModel(
- inputModel: LoadInput,
+ inputModel: MarkovInput,
operationInterval: OperationInterval,
- modelConfig: LoadRuntimeConfig,
- reference: LoadReference,
- ): ProfileLoadModel =
- ProfileLoadModel(
+ modelConfig: MarkovRuntimeConfig,
+ reference: MarkovReference,
+ ): ProfileMarkovModel =
+ ProfileMarkovModel(
inputModel,
operationInterval,
modelConfig.scaling,
@@ -464,7 +468,7 @@ object LoadAgentFundamentals {
ApparentPower,
ProfileRelevantData,
ConstantState.type,
- ProfileLoadModel,
+ ProfileMarkovModel,
],
currentTick: Long,
): ProfileRelevantData =
@@ -482,7 +486,7 @@ object LoadAgentFundamentals {
ApparentPower,
ProfileRelevantData,
ConstantState.type,
- ProfileLoadModel,
+ ProfileMarkovModel,
],
ConstantState.type,
Dimensionless,
@@ -499,20 +503,20 @@ object LoadAgentFundamentals {
}
}
- trait RandomLoadAgentFundamentals
- extends LoadAgentFundamentals[
+ trait RandomMarkovAgentFundamentals
+ extends MarkovAgentFundamentals[
RandomRelevantData,
- RandomLoadModel,
+ RandomMarkovModel,
] {
- this: MarkovAgent.RandomLoadAgent =>
+ this: MarkovAgent.RandomMarkovAgent =>
override def buildModel(
- inputModel: LoadInput,
+ inputModel: MarkovInput,
operationInterval: OperationInterval,
- modelConfig: LoadRuntimeConfig,
- reference: LoadReference,
- ): RandomLoadModel =
- RandomLoadModel(
+ modelConfig: MarkovRuntimeConfig,
+ reference: MarkovReference,
+ ): RandomMarkovModel =
+ RandomMarkovModel(
inputModel,
operationInterval,
modelConfig.scaling,
@@ -524,7 +528,7 @@ object LoadAgentFundamentals {
ApparentPower,
RandomRelevantData,
ConstantState.type,
- RandomLoadModel,
+ RandomMarkovModel,
],
tick: Long,
): RandomRelevantData =
@@ -542,7 +546,7 @@ object LoadAgentFundamentals {
ApparentPower,
RandomRelevantData,
ConstantState.type,
- RandomLoadModel,
+ RandomMarkovModel,
],
ConstantState.type,
Dimensionless,
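
Patch 04 rewires the fundamentals onto a Markov model family (MarkovModel, FixedMarkovModel, ProfileMarkovModel, RandomMarkovModel, MarkovReference, MarkovProfileStore) that is not part of the series at this point. Sketched purely from the call sites above, the surface the fundamentals rely on looks roughly like the stub below; the real trait is expected to mirror LoadModel/SystemParticipant rather than this sketch:

    import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
    import edu.ie3.simona.model.participant.CalcRelevantData.MarkovRelevantData
    import edu.ie3.simona.model.participant.FlexChangeIndicator
    import edu.ie3.simona.model.participant.ModelState.ConstantState
    import edu.ie3.util.scala.OperationInterval
    import edu.ie3.util.scala.quantities.ReactivePower
    import squants.{Dimensionless, Power}

    trait MarkovModel[MD <: MarkovRelevantData] {
      // used by the fundamentals to derive additional activation ticks
      def operationInterval: OperationInterval

      // called via calculateModelPowerFunc
      def calculatePower(
          tick: Long,
          voltage: Dimensionless,
          state: ConstantState.type,
          data: MD,
      ): ApparentPower

      // used when a flex set point is turned into an apparent power result
      def calculateReactivePower(
          activePower: Power,
          voltage: Dimensionless,
      ): ReactivePower

      def handleControlledPowerChange(
          data: MD,
          lastState: ConstantState.type,
          setPower: Power,
      ): (ConstantState.type, FlexChangeIndicator)
    }
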
From 37567a82fa8bd92c8482b232ac3fe109707c2737 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Thu, 14 Mar 2024 11:33:00 +0100
Subject: [PATCH 05/36] Extend file structure and add time.scala and appliance_category.scala
---
.../markov/appliance_category.scala | 5 ++
.../model/participant/markov/time.scala | 51 +++++++++++++++++++
2 files changed, 56 insertions(+)
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/time.scala
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
new file mode 100644
index 0000000000..375651aca6
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
@@ -0,0 +1,5 @@
+object ApplianceCategory extends Enumeration {
+ type ApplianceCategory = Value
+ val DISH_WASHER, WASHING_MACHINE, DRYER, STOVE, FRIDGE, FREEZER, TELEVISION, VIDEO_RECORDER, PC, TELECOMMUNICATION, LIGHTING, WATER_HEATING, OTHER_LOAD = Value
+}
+
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/time.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/time.scala
new file mode 100644
index 0000000000..23cfb4e90e
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/markov/time.scala
@@ -0,0 +1,51 @@
+import java.time.LocalDateTime
+import java.time.temporal.ChronoUnit
+
+object Season extends Enumeration {
+ type Season = Value
+ val SPRING, SUMMER, AUTUMN, WINTER = Value
+
+ def getSeason(dateTime: LocalDateTime): Season = {
+ val month = dateTime.getMonthValue
+ if (month >= 3 && month <= 5) SPRING
+ else if (month >= 6 && month <= 8) SUMMER
+ else if (month >= 9 && month <= 11) AUTUMN
+ else WINTER
+ }
+}
+
+object DayType extends Enumeration {
+ type DayType = Value
+ val WEEKDAY, SATURDAY, SUNDAY = Value
+
+ def getDayType(dateTime: LocalDateTime): DayType = {
+ val weekday = dateTime.getDayOfWeek.getValue
+ if (weekday < 6) WEEKDAY
+ else if (weekday == 6) SATURDAY
+ else SUNDAY
+ }
+}
+
+case class TimeInterval(start: LocalDateTime, end: LocalDateTime) {
+ def isWithin(time: LocalDateTime): Boolean = {
+ start.compareTo(time) <= 0 && end.compareTo(time) > 0
+ }
+}
+
+object TimeInterval {
+ def getOperationInterval(start: LocalDateTime, duration: Long): TimeInterval = {
+ val end = start.plus(duration, ChronoUnit.MILLIS)
+ TimeInterval(start, end)
+ }
+}
+
+object Main extends App {
+ val now = LocalDateTime.now()
+
+ println("Current Season: " + Season.getSeason(now))
+ println("Current Day Type: " + DayType.getDayType(now))
+
+ val operationInterval = TimeInterval.getOperationInterval(now, 3600000) // 1 hour duration
+ println("Is within operation interval? " + operationInterval.isWithin(now))
+}
+
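
The season and day-type rules in time.scala are easy to spot-check against fixed dates instead of LocalDateTime.now(); the object below is only an illustration of the expected classifications and is not part of the patch:

    import java.time.LocalDateTime

    object TimeSpotCheck extends App {
      // 2024-07-13 was a Saturday in July: month 7 -> SUMMER, ISO weekday 6 -> SATURDAY
      val saturdayInJuly = LocalDateTime.of(2024, 7, 13, 12, 0)
      assert(Season.getSeason(saturdayInJuly) == Season.SUMMER)
      assert(DayType.getDayType(saturdayInJuly) == DayType.SATURDAY)

      // 2024-12-02 was a Monday: month 12 -> WINTER, ISO weekday 1 -> WEEKDAY
      val mondayInDecember = LocalDateTime.of(2024, 12, 2, 8, 30)
      assert(Season.getSeason(mondayInDecember) == Season.WINTER)
      assert(DayType.getDayType(mondayInDecember) == DayType.WEEKDAY)

      // a one-hour interval contains its start but not its end
      val interval = TimeInterval.getOperationInterval(saturdayInJuly, 3600000L)
      assert(interval.isWithin(saturdayInJuly))
      assert(!interval.isWithin(saturdayInJuly.plusHours(1)))
    }
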
From e4bb77c3a4cf4110cef3877b156baef88d52ad24 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Sun, 24 Mar 2024 01:25:19 +0100
Subject: [PATCH 06/36] Adjust LoadAgent base
---
.../participant/markov/MarkovAgent.scala | 32 ----
.../markov/MarkovAgentFundamentals.scala | 145 ------------------
.../edu/ie3/simona/config/SimonaConfig.scala | 5 +-
.../model/participant/CalcRelevantData.scala | 1 +
4 files changed, 5 insertions(+), 178 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
index 771ce1dcff..b91712bc02 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
@@ -41,32 +41,14 @@ object MarkovAgent {
listener: Iterable[ActorRef],
): Props =
MarkovModelBehaviour(initStateData.modelConfig.modelBehaviour) match {
- case MarkovModelBehaviour.FIX =>
- Props(new FixedMarkovAgent(scheduler, initStateData, listener))
case MarkovModelBehaviour.PROFILE =>
Props(new ProfileMarkovAgent(scheduler, initStateData, listener))
- case MarkovModelBehaviour.RANDOM =>
- Props(new RandomMarkovAgent(scheduler, initStateData, listener))
case unsupported =>
throw new IllegalArgumentException(
s"The markov agent behaviour '$unsupported' is currently not supported."
)
}
- final class FixedMarkovAgent(
- scheduler: ActorRef,
- initStateData: ParticipantInitializeStateData[
- MarkovInput,
- MarkovRuntimeConfig,
- ApparentPower,
- ],
- override val listener: Iterable[ActorRef],
- ) extends MarkovAgent[
- FixedMarkovModel.FixedMarkovRelevantData.type,
- FixedMarkovModel,
- ](scheduler, initStateData, listener)
- with FixedMarkovAgentFundamentals
-
final class ProfileMarkovAgent(
scheduler: ActorRef,
initStateData: ParticipantInitializeStateData[
@@ -80,20 +62,6 @@ object MarkovAgent {
ProfileMarkovModel,
](scheduler, initStateData, listener)
with ProfileMarkovAgentFundamentals
-
- final class RandomMarkovAgent(
- scheduler: ActorRef,
- initStateData: ParticipantInitializeStateData[
- MarkovInput,
- MarkovRuntimeConfig,
- ApparentPower,
- ],
- override val listener: Iterable[ActorRef],
- ) extends MarkovAgent[
- RandomRelevantData,
- RandomMarkovModel,
- ](scheduler, initStateData, listener)
- with RandomMarkovAgentFundamentals
}
 /** Creating a Markov agent
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
index e4bb127c8b..71ba79efea 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
@@ -143,20 +143,6 @@ protected trait MarkovAgentFundamentals[
* Also register for services, where needed. */
val lastTickInSimulation = simulationEndDate.toTick(simulationStartDate)
val additionalActivationTicks = model match {
- /* If no secondary data is needed (implicitly by fixed Markov model), add activation ticks for the simple model */
- case fixedMarkovModel: FixedMarkovModel =>
- /* As participant agents always return their last known operation point on request, it is sufficient
-       * to let a fixed Markov model determine its operation point on:
- * 1) The first tick of the simulation
- * 2) The tick, it turns on (in time dependent operation)
- * 3) The tick, it turns off (in time dependent operation)
- * Coinciding ticks are summarized and the last tick is removed, as the change in operation status
- * doesn't affect anything then */
- SortedSet[Long](
- SimonaConstants.FIRST_TICK_IN_SIMULATION,
- fixedMarkovModel.operationInterval.start,
- fixedMarkovModel.operationInterval.end,
- ).filterNot(_ == lastTickInSimulation)
case profileMarkovModel: ProfileMarkovModel =>
activationTicksInOperationTime(
simulationStartDate,
@@ -164,13 +150,6 @@ protected trait MarkovAgentFundamentals[
profileMarkovModel.operationInterval.start,
profileMarkovModel.operationInterval.end,
)
- case randomMarkovModel: RandomMarkovModel =>
- activationTicksInOperationTime(
- simulationStartDate,
- RandomMarkovParamStore.resolution.getSeconds,
- randomMarkovModel.operationInterval.start,
- randomMarkovModel.operationInterval.end,
- )
case _ =>
SortedSet.empty[Long]
}
@@ -379,70 +358,6 @@ protected trait MarkovAgentFundamentals[
}
object MarkovAgentFundamentals {
- trait FixedMarkovAgentFundamentals
- extends MarkovAgentFundamentals[
- FixedMarkovModel.FixedMarkovRelevantData.type,
- FixedMarkovModel,
- ] {
- this: MarkovAgent.FixedMarkovAgent =>
-
- override def buildModel(
- inputModel: MarkovInput,
- operationInterval: OperationInterval,
- modelConfig: MarkovRuntimeConfig,
- reference: MarkovReference,
- ): FixedMarkovModel =
- FixedMarkovModel(
- inputModel,
- modelConfig.scaling,
- operationInterval,
- reference,
- )
-
- override protected def createCalcRelevantData(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- FixedMarkovRelevantData.type,
- ConstantState.type,
- FixedMarkovModel,
- ],
- tick: Long,
- ): FixedMarkovRelevantData.type =
- FixedMarkovRelevantData
-
- /** Partial function, that is able to transfer
- * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
- * into a pair of active and reactive power
- */
- override val calculateModelPowerFunc: (
- Long,
- ParticipantModelBaseStateData[
- ApparentPower,
- FixedMarkovRelevantData.type,
- ConstantState.type,
- FixedMarkovModel,
- ],
- ConstantState.type,
- Dimensionless,
- ) => ApparentPower = (
- tick: Long,
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- FixedMarkovRelevantData.type,
- ConstantState.type,
- FixedMarkovModel,
- ],
- state: ConstantState.type,
- voltage: Dimensionless,
- ) =>
- baseStateData.model.calculatePower(
- tick,
- voltage,
- state,
- FixedMarkovRelevantData,
- )
- }
-
trait ProfileMarkovAgentFundamentals
extends MarkovAgentFundamentals[
ProfileRelevantData,
@@ -502,64 +417,4 @@ object MarkovAgentFundamentals {
)
}
}
-
- trait RandomMarkovAgentFundamentals
- extends MarkovAgentFundamentals[
- RandomRelevantData,
- RandomMarkovModel,
- ] {
- this: MarkovAgent.RandomMarkovAgent =>
-
- override def buildModel(
- inputModel: MarkovInput,
- operationInterval: OperationInterval,
- modelConfig: MarkovRuntimeConfig,
- reference: MarkovReference,
- ): RandomMarkovModel =
- RandomMarkovModel(
- inputModel,
- operationInterval,
- modelConfig.scaling,
- reference,
- )
-
- override protected def createCalcRelevantData(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- RandomRelevantData,
- ConstantState.type,
- RandomMarkovModel,
- ],
- tick: Long,
- ): RandomRelevantData =
- RandomRelevantData(
- tick.toDateTime(baseStateData.startDate)
- )
-
- /** Partial function, that is able to transfer
- * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
- * into a pair of active and reactive power
- */
- override val calculateModelPowerFunc: (
- Long,
- ParticipantModelBaseStateData[
- ApparentPower,
- RandomRelevantData,
- ConstantState.type,
- RandomMarkovModel,
- ],
- ConstantState.type,
- Dimensionless,
- ) => ApparentPower = (tick, baseStateData, _, voltage) => {
- val profileRelevantData =
- createCalcRelevantData(baseStateData, tick)
-
- baseStateData.model.calculatePower(
- currentTick,
- voltage,
- ConstantState,
- profileRelevantData,
- )
- }
- }
}
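
The comment block removed above spelled out on which ticks a fixed model needs additional activation. A minimal, self-contained sketch of that tick collection; the constants stand in for SimonaConstants.FIRST_TICK_IN_SIMULATION and the model's operation interval and are illustrative only:

    import scala.collection.immutable.SortedSet

    object ActivationTickSketch extends App {
      // Stand-in values; the real ones come from SimonaConstants and the operation interval.
      val firstTickInSimulation = 0L
      val operationStart = 3600L
      val operationEnd = 86400L
      val lastTickInSimulation = 86400L

      // Coinciding ticks collapse in the SortedSet; the last simulation tick is dropped,
      // since a change in operation status there has no further effect.
      val additionalActivationTicks =
        SortedSet(firstTickInSimulation, operationStart, operationEnd)
          .filterNot(_ == lastTickInSimulation)

      println(additionalActivationTicks) // TreeSet(0, 3600)
    }
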
diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
index c64c62ca3f..a8d12f3d0c 100644
--- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
+++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
@@ -2955,4 +2955,7 @@ object SimonaConfig {
}
}
}
-}
+
+ case class MarkovRuntimeConfig(
+
+ )
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala b/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
index 8b76d10fc6..0e557be074 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
@@ -24,4 +24,5 @@ object CalcRelevantData {
*/
trait LoadRelevantData extends CalcRelevantData
+ case class MarkovRelevantData()
}
From 2aac783bdbe8b91ede44b276b6b1e88d27cb8c47 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 25 Mar 2024 10:04:56 +0100
Subject: [PATCH 07/36] Loadagent based MarkovAgent V0
---
.../participant/markov/MarkovAgent.scala | 17 +--
.../markov/MarkovAgentFundamentals.scala | 7 --
.../edu/ie3/simona/config/SimonaConfig.scala | 5 +-
.../markov/MarkovHousehouldkey.Scala | 104 ++++++++++++++++++
.../participant/markov/MarkovModel.scala | 0
.../markov/MarkovModelBehaviour.Scala | 15 +++
.../markov/{time.scala => Season.scala} | 0
.../model/participant/markov/TypeDay.Scala | 46 ++++++++
8 files changed, 168 insertions(+), 26 deletions(-)
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModel.scala
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
rename src/main/scala/edu/ie3/simona/model/participant/markov/{time.scala => Season.scala} (100%)
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
index b91712bc02..22a099eb3c 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
@@ -9,11 +9,8 @@ package edu.ie3.simona.agent.participant.markov
import edu.ie3.datamodel.models.input.system.MarkovInput
import edu.ie3.simona.agent.participant.ParticipantAgent
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.markov.MarkovAgentFundamentals.{
- FixedMarkovAgentFundamentals,
- ProfileMarkovAgentFundamentals,
- RandomMarkovAgentFundamentals,
-}
+import edu.ie3.simona.agent.participant.markov.MarkovAgentFundamentals.ProfileMarkovAgentFundamentals
+
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
@@ -24,7 +21,6 @@ import edu.ie3.simona.model.participant.markov.profile.ProfileMarkovModel.Profil
import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel
import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel.RandomRelevantData
import edu.ie3.simona.model.participant.markov.{
- FixedMarkovModel,
MarkovModel,
MarkovModelBehaviour,
}
@@ -89,13 +85,4 @@ abstract class MarkovAgent[MD <: MarkovRelevantData, MM <: MarkovModel[MD]](
MM,
](scheduler, initStateData)
with MarkovAgentFundamentals[MD, MM] {
- /*
- * "Hey, SIMONA! What is handled in ParticipantAgent?"
- * "Hey, dude! The following things are handled in ParticipantAgent:
- * 1) Initialization of Agent
- * 2) Event reactions in Idle state
- * 3) Handling of incoming information
- * 4) Performing model calculations
- * "
- */
}
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
index 71ba79efea..8fe6c3055a 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
@@ -34,19 +34,12 @@ import edu.ie3.simona.exceptions.agent.InconsistentStateException
import edu.ie3.simona.model.SystemComponent
import edu.ie3.simona.model.participant.CalcRelevantData.MarkovRelevantData
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.Markov.FixedMarkovModel.FixedMarkovRelevantData
import edu.ie3.simona.model.participant.Markov.profile.ProfileMarkovModel.ProfileRelevantData
import edu.ie3.simona.model.participant.Markov.profile.{
MarkovProfileStore,
ProfileMarkovModel,
}
-import edu.ie3.simona.model.participant.Markov.random.RandomMarkovModel.RandomRelevantData
-import edu.ie3.simona.model.participant.Markov.random.{
- RandomMarkovModel,
- RandomMarkovParamStore,
-}
import edu.ie3.simona.model.participant.Markov.{
- FixedMarkovModel,
MarkovModel,
MarkovReference,
}
diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
index a8d12f3d0c..7e386465ef 100644
--- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
+++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
@@ -2955,7 +2955,4 @@ object SimonaConfig {
}
}
}
-
- case class MarkovRuntimeConfig(
-
- )
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
new file mode 100644
index 0000000000..d0fae5e5cb
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
@@ -0,0 +1,104 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.profile
+
+import java.time.ZonedDateTime
+import edu.ie3.datamodel.exceptions.ParsingException
+import edu.ie3.datamodel.models.profile.StandardLoadProfile
+import edu.ie3.simona.model.participant.load
+import edu.ie3.simona.model.participant.load.{DayType, profile}
+
+/** A key describing a MarkovHousehold profile, consisting of a consumer type, a season and a
+ * day type. It is used to store MarkovHousehold profile values for a single type.
+ *
+ * @param standardMarkovHousehold
+ * a consumer type
+ * @param season
+ * a season
+ * @param dayType
+ * a day type
+ */
+final case class MarkovHouseholdKey(
+ standardMarkovHousehold: StandardLoadProfile,
+ season: Season.Value,
+ dayType: DayType.Value,
+)
+
+case object MarkovHouseholdKey {
+
+ /** Creates a MarkovHousehold key from a given csv header, e.g. "g0WiSu"
+ *
+ * @param headerKey
+ * the header
+ * @return
+ * a MarkovHousehold key
+ */
+ def apply(headerKey: String): MarkovHouseholdKey = {
+ val regex = "([a-z][0-9])([A-Z][a-z])([A-Z][a-z])".r
+
+ headerKey match {
+ case regex(markovHouseholdKey, seasonKey, dayTypeKey) =>
+ MarkovHouseholdKey(markovHouseholdKey, seasonKey, dayTypeKey)
+ case _ =>
+ throw new RuntimeException(
+ s"Provided MarkovHousehold profile header key $headerKey is malformed. It has to be of the form ${regex.pattern} e.g. 'g0WiSu'."
+ )
+ }
+ }
+
+ /** Creates a MarkovHousehold key from three Strings describing a MarkovHousehold
+ *
+ * @param MarkovHousehold
+ * Key describing the MarkovHousehold
+ * @param season
+ * Key describing the season
+ * @param dayType
+ * Key describing the day type
+ * @return
+ * a MarkovHousehold key
+ */
+ def apply(
+ MarkovHousehold: String,
+ season: String,
+ dayType: String,
+ ): MarkovHouseholdKey = {
+ try {
+ new MarkovHouseholdKey(
+ StandardLoadProfile.parse(MarkovHousehold),
+ Season(season),
+ DayType(dayType),
+ )
+ } catch {
+ case e: ParsingException =>
+ throw new IllegalArgumentException(
+ s"Cannot parse '$MarkovHousehold' to a now StandardLoadProfile.",
+ e,
+ )
+ }
+ }
+
+ /** Creates a MarkovHousehold key from a consumer type value and a ZonedDateTime
+ * object
+ *
+ * @param MarkovHousehold
+ * The standard MarkovHousehold
+ * @param time
+ * The time
+ * @return
+ * a MarkovHousehold key
+ */
+ def apply(
+ MarkovHousehold: StandardLoadProfile,
+ time: ZonedDateTime,
+ ): MarkovHouseholdKey = {
+ new MarkovHouseholdKey(
+ MarkovHousehold,
+ profile.Season(time),
+ load.DayType(time.getDayOfWeek),
+ )
+ }
+}
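
The header-key parsing above splits a string such as "g0WiSu" into a profile, a season and a day-type token via the three regex groups. A self-contained sketch of just that parsing step, with plain strings standing in for StandardLoadProfile, Season and DayType:

    object HeaderKeyParsingSketch extends App {
      private val regex = "([a-z][0-9])([A-Z][a-z])([A-Z][a-z])".r

      def parse(headerKey: String): (String, String, String) = headerKey match {
        case regex(profileKey, seasonKey, dayTypeKey) => (profileKey, seasonKey, dayTypeKey)
        case _ =>
          throw new IllegalArgumentException(
            s"Header key '$headerKey' is malformed, expected something like 'g0WiSu'."
          )
      }

      println(parse("g0WiSu")) // (g0,Wi,Su)
    }
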
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModel.scala
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
new file mode 100644
index 0000000000..c8faaceb33
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
@@ -0,0 +1,15 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load
+
+import edu.ie3.simona.util.ParsableEnumeration
+
+/** Enumeration to describe all eligible load model behaviours
+ */
+case object MarkovModelBehaviour extends ParsableEnumeration {
+ val HOUSEHOLD: Value = Value("household")
+}
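
MarkovModelBehaviour only declares the parsable value "household" at this point. How a config token could resolve against such an enumeration can be sketched with a plain scala.Enumeration; ParsableEnumeration's actual lookup API is not shown in this patch, so a simple find replaces it here:

    object BehaviourParsingSketch extends Enumeration {
      val HOUSEHOLD: Value = Value("household")

      def parse(token: String): Option[Value] =
        values.find(_.toString.equalsIgnoreCase(token.trim))
    }

    // BehaviourParsingSketch.parse("household") // Some(HOUSEHOLD)
    // BehaviourParsingSketch.parse("profile")   // None, i.e. the factory's "unsupported" branch
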
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/time.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/markov/time.scala
rename to src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
new file mode 100644
index 0000000000..c97cdbdd67
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
@@ -0,0 +1,46 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.Markov.profile
+
+import java.time.ZonedDateTime
+
+import edu.ie3.util.TimeUtil
+
+// needs to be imported for max function
+import scala.math.Ordering.Double.IeeeOrdering
+
+/** Stores a slice of Markov Household data that comprises a whole day (96 quarter
+ * hours). The data describes a typical day that can unequivocally be
+ * identified by a [[MarkovHouseholdKey]].
+ *
+ * @param values
+ * 96 quarter-hour values for this Markov Household,
+ */
+final case class TypeDayProfile(private val values: Array[Double]) {
+ if (values.length != 96)
+ throw new IllegalArgumentException(
+ "You may only instantiate type day parameters with 96 values."
+ )
+
+ /** Returns a value for given time. If time is not a 15 min step, it is
+ * rounded up to the next 15 min slice.
+ *
+ * @param time
+ * the time
+ * @return
+ * the load value
+ */
+ def getQuarterHourEnergy(time: ZonedDateTime): Double = {
+ val quartH = TimeUtil.withDefaults.getQuarterHourOfDay(time)
+ values(quartH)
+ }
+
+ /** @return
+ * the maximum value of this MarkovHousehold
+ */
+ def getMaxValue: Double = values.maxOption.getOrElse(Double.PositiveInfinity)
+}
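
TypeDayProfile above indexes 96 quarter-hour values by the quarter hour of the day. A self-contained sketch of that lookup, with TimeUtil.withDefaults.getQuarterHourOfDay replaced by an inline calculation:

    import java.time.ZonedDateTime
    import scala.math.Ordering.Double.IeeeOrdering

    final case class QuarterHourProfileSketch(values: Vector[Double]) {
      require(values.length == 96, "Exactly 96 quarter-hour values are required.")

      /** Index 0 covers 00:00-00:15, index 95 covers 23:45-24:00. */
      def valueAt(time: ZonedDateTime): Double =
        values(time.getHour * 4 + time.getMinute / 15)

      def maxValue: Double = values.max
    }

    // QuarterHourProfileSketch(Vector.fill(96)(0.25)).valueAt(ZonedDateTime.now()) // 0.25
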
From a384d91b59dbaaf03d288f9812d6ec6426eea9d2 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 1 Apr 2024 21:44:59 +0200
Subject: [PATCH 08/36] MarkovAgent Basics
---
.../resources/markov/appliances/load_ts.xlsx | Bin 11666 -> 0 bytes
src/main/resources/markov/config.yaml | 5 -
.../participant/load/markov/MarkovAgent.scala | 72 +++++
.../markov/MarkovAgentFundamentals.Scala} | 262 ++++++------------
.../participant/markov/MarkovAgent.scala | 88 ------
.../model/participant/CalcRelevantData.scala | 1 -
.../markov/MarkovData.scala} | 0
.../participant/load/markov/MarkovModel.scala | 5 +
.../resources}/appliances/average_hh.csv | 0
.../resources}/appliances/by_income.csv | 0
.../resources}/appliances/by_inhabitants.csv | 0
.../markov/resources}/appliances/by_type.csv | 0
.../markov/resources}/appliances/load_ts.csv | 0
.../switch_on_probabilities/dish_washer.csv | 0
.../switch_on_probabilities/dryer.csv | 0
.../switch_on_probabilities/freezer.csv | 0
.../switch_on_probabilities/fridge.csv | 0
.../switch_on_probabilities/lighting.csv | 0
.../switch_on_probabilities/pc.csv | 0
.../switch_on_probabilities/stove.csv | 0
.../telecommunication.csv | 0
.../switch_on_probabilities/television.csv | 0
.../video_recorder.csv | 0
.../washing_machine.csv | 0
.../switch_on_probabilities/water_heating.csv | 0
.../usage_probabilities.csv | 0
.../markov/MarkovHousehouldkey.Scala | 104 -------
.../markov/MarkovModelBehaviour.Scala | 15 -
.../model/participant/markov/Season.scala | 51 ----
.../model/participant/markov/TypeDay.Scala | 46 ---
.../markov/appliance_category.scala | 5 -
.../MarkovAgentModelCalculationSpec.scala | 5 +
.../participant/load/MarkovModelSpec.scala | 5 +
33 files changed, 175 insertions(+), 489 deletions(-)
delete mode 100644 src/main/resources/markov/appliances/load_ts.xlsx
delete mode 100644 src/main/resources/markov/config.yaml
create mode 100644 src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
rename src/main/scala/edu/ie3/simona/agent/participant/{markov/MarkovAgentFundamentals.scala => load/markov/MarkovAgentFundamentals.Scala} (62%)
delete mode 100644 src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
rename src/main/scala/edu/ie3/simona/model/participant/{markov/MarkovModel.scala => load/markov/MarkovData.scala} (100%)
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/appliances/average_hh.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/appliances/by_income.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/appliances/by_inhabitants.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/appliances/by_type.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/appliances/load_ts.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/dish_washer.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/dryer.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/freezer.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/fridge.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/lighting.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/pc.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/stove.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/telecommunication.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/television.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/video_recorder.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/washing_machine.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/switch_on_probabilities/water_heating.csv (100%)
rename src/main/{resources/markov => scala/edu/ie3/simona/model/participant/load/markov/resources}/probabilities/usage_probabilities/usage_probabilities.csv (100%)
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
create mode 100644 src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
create mode 100644 src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
diff --git a/src/main/resources/markov/appliances/load_ts.xlsx b/src/main/resources/markov/appliances/load_ts.xlsx
deleted file mode 100644
index c5b5696f63b2ca4b06513296f5bf76f8b3db2a95..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 11666
[base85-encoded binary delta for load_ts.xlsx omitted; the diffs for src/main/resources/markov/config.yaml and the new src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala are truncated]
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
similarity index 62%
rename from src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
rename to src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgentFundamentals.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+ this: MarkovAgent =>
override protected val pdClassTag: ClassTag[ApparentPower] =
classTag[ApparentPower]
override val alternativeResult: ApparentPower = ZERO_POWER
@@ -107,8 +93,9 @@ protected trait MarkovAgentFundamentals[
* A child of [[ParticipantModelBaseStateData]] that reflects the behaviour
* based on the data source definition
*/
- override def determineModelBaseStateData(
- inputModel: InputModelContainer[MarkovInput],
+
+ override def determineModelBaseStateData(
+ inputModel: InputModelContainer[MarkovData],
modelConfig: MarkovRuntimeConfig,
services: Iterable[SecondaryDataService[_ <: SecondaryData]],
simulationStartDate: ZonedDateTime,
@@ -119,9 +106,9 @@ protected trait MarkovAgentFundamentals[
maybeEmAgent: Option[TypedActorRef[FlexResponse]],
): ParticipantModelBaseStateData[
ApparentPower,
- LD,
+ MarkovRelevantData,
ConstantState.type,
- LM,
+ MarkovModel,
] = {
/* Build the calculation model */
val model =
@@ -132,85 +119,30 @@ protected trait MarkovAgentFundamentals[
simulationEndDate,
)
- /* Go and collect all ticks, in which activation is needed in addition to the activations made by incoming data.
- * Also register for services, where needed. */
- val lastTickInSimulation = simulationEndDate.toTick(simulationStartDate)
- val additionalActivationTicks = model match {
- case profileMarkovModel: ProfileMarkovModel =>
- activationTicksInOperationTime(
- simulationStartDate,
- MarkovProfileStore.resolution.getSeconds,
- profileMarkovModel.operationInterval.start,
- profileMarkovModel.operationInterval.end,
- )
- case _ =>
- SortedSet.empty[Long]
- }
-
- ParticipantModelBaseStateData[ApparentPower, LD, ConstantState.type, LM](
- simulationStartDate,
- simulationEndDate,
- model,
- services,
- outputConfig,
- additionalActivationTicks,
- Map.empty,
- requestVoltageDeviationThreshold,
- ValueStore.forVoltage(
- resolution,
- Each(
- inputModel.electricalInputModel.getNode
- .getvTarget()
- .to(PU)
- .getValue
- .doubleValue
- ),
- ),
- ValueStore(resolution),
- ValueStore(resolution),
- ValueStore(resolution),
- ValueStore(resolution),
- maybeEmAgent.map(FlexControlledData(_, self.toTyped[FlexRequest])),
- )
- }
-
- override def buildModel(
- inputModel: InputModelContainer[MarkovInput],
- modelConfig: MarkovRuntimeConfig,
- simulationStartDate: ZonedDateTime,
- simulationEndDate: ZonedDateTime,
- ): LM = {
- val operationInterval: OperationInterval =
- SystemComponent.determineOperationInterval(
- simulationStartDate,
- simulationEndDate,
- inputModel.electricalInputModel.getOperationTime,
- )
- val reference =
- MarkovReference(inputModel.electricalInputModel, modelConfig)
- buildModel(
- inputModel.electricalInputModel,
- operationInterval,
- modelConfig,
- reference,
- )
- }
-
- protected def buildModel(
- inputModel: MarkovInput,
- operationInterval: OperationInterval,
- modelConfig: MarkovRuntimeConfig,
- reference: MarkovReference,
- ): LM
-
- override protected def createInitialState(
+override protected def createInitialState(
baseStateData: ParticipantModelBaseStateData[
ApparentPower,
- LD,
+ MarkovRelevantData,
ConstantState.type,
- LM,
+ MarkovModel,
]
- ): ModelState.ConstantState.type = ConstantState
+ ): ModelState.ConstantState.type =
+ ConstantState
+
+ override protected def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ tick: Long,
+ ): MarkovRelevantData = {
+
+ MarkovRelevantData(
+ None,
+ )
+ }
/** Handle an active power change by flex control.
* @param tick
@@ -230,11 +162,11 @@ protected trait MarkovAgentFundamentals[
tick: Long,
baseStateData: ParticipantModelBaseStateData[
ApparentPower,
- LD,
+ MarkovRelevantData,
ConstantState.type,
- LM,
+ MarkovModel,
],
- data: LD,
+ data: MarkovRelevantData,
lastState: ConstantState.type,
setPower: squants.Power,
): (ConstantState.type, ApparentPower, FlexChangeIndicator) = {
@@ -253,6 +185,34 @@ protected trait MarkovAgentFundamentals[
(updatedState, result, flexChangeIndicator)
}
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+
+  override val calculateModelPowerFunc: (
+      Long,
+      ParticipantModelBaseStateData[
+        ApparentPower,
+        MarkovRelevantData,
+        ConstantState.type,
+        MarkovModel,
+      ],
+      ConstantState.type,
+      Dimensionless,
+  ) => ApparentPower =
+    (tick, baseStateData, state, voltage) =>
+      // Delegate to the calculation model, mirroring the other participant
+      // agent fundamentals in this patch series.
+      baseStateData.model.calculatePower(
+        tick,
+        voltage,
+        state,
+        createCalcRelevantData(baseStateData, tick),
+      )
+
/** Calculate the power output of the participant utilising secondary data.
* However, it might appear, that not the complete set of secondary data is
* available for the given tick. This might especially be true, if the actor
@@ -265,7 +225,7 @@ protected trait MarkovAgentFundamentals[
*
* @param baseStateData
* The base state data with collected secondary data
- * @param maybeLastModelState
+ * @param lastModelState
* Optional last model state
* @param currentTick
* Tick, the trigger belongs to
@@ -274,12 +234,12 @@ protected trait MarkovAgentFundamentals[
* @return
* [[Idle]] with updated result values
*/
- override def calculatePowerWithSecondaryDataAndGoToIdle(
+ override def calculatePowerWithSecondaryDataAndGoToIdle(
baseStateData: ParticipantModelBaseStateData[
ApparentPower,
- LD,
+ MarkovRelevantData,
ConstantState.type,
- LM,
+ MarkovModel,
],
lastModelState: ConstantState.type,
currentTick: Long,
@@ -341,73 +301,27 @@ protected trait MarkovAgentFundamentals[
result.q.toMegavars.asMegaVar,
)
+ /** Update the last known model state with the given external, relevant data
+ *
+ * @param tick
+ * Tick to update state for
+ * @param modelState
+ * Last known model state
+ * @param calcRelevantData
+ * Data, relevant for calculation
+ * @param nodalVoltage
+ * Current nodal voltage of the agent
+ * @param model
+ * Model for calculation
+ * @return
+ * The updated state at given tick under consideration of calculation
+ * relevant data
+ */
override protected def updateState(
tick: Long,
modelState: ModelState.ConstantState.type,
- calcRelevantData: LD,
+ calcRelevantData: MarkovRelevantData,
nodalVoltage: squants.Dimensionless,
- model: LM,
+ model: MarkovModel,
): ModelState.ConstantState.type = modelState
}
-
-object MarkovAgentFundamentals {
- trait ProfileMarkovAgentFundamentals
- extends MarkovAgentFundamentals[
- ProfileRelevantData,
- ProfileMarkovModel,
- ] {
- this: MarkovAgent.ProfileMarkovAgent =>
-
- override def buildModel(
- inputModel: MarkovInput,
- operationInterval: OperationInterval,
- modelConfig: MarkovRuntimeConfig,
- reference: MarkovReference,
- ): ProfileMarkovModel =
- ProfileMarkovModel(
- inputModel,
- operationInterval,
- modelConfig.scaling,
- reference,
- )
-
- override protected def createCalcRelevantData(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- ProfileRelevantData,
- ConstantState.type,
- ProfileMarkovModel,
- ],
- currentTick: Long,
- ): ProfileRelevantData =
- ProfileRelevantData(
- currentTick.toDateTime(baseStateData.startDate)
- )
-
- /** Partial function, that is able to transfer
- * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
- * into a pair of active and reactive power
- */
- override val calculateModelPowerFunc: (
- Long,
- ParticipantModelBaseStateData[
- ApparentPower,
- ProfileRelevantData,
- ConstantState.type,
- ProfileMarkovModel,
- ],
- ConstantState.type,
- Dimensionless,
- ) => ApparentPower = (tick, baseStateData, _, voltage) => {
- val profileRelevantData =
- createCalcRelevantData(baseStateData, tick)
-
- baseStateData.model.calculatePower(
- currentTick,
- voltage,
- ConstantState,
- profileRelevantData,
- )
- }
- }
-}
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
deleted file mode 100644
index 22a099eb3c..0000000000
--- a/src/main/scala/edu/ie3/simona/agent/participant/markov/MarkovAgent.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.agent.participant.markov
-
-import edu.ie3.datamodel.models.input.system.MarkovInput
-import edu.ie3.simona.agent.participant.ParticipantAgent
-import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.markov.MarkovAgentFundamentals.ProfileMarkovAgentFundamentals
-
-import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
-import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
-import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
-import edu.ie3.simona.model.participant.CalcRelevantData.MarkovRelevantData
-import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.markov.profile.ProfileMarkovModel
-import edu.ie3.simona.model.participant.markov.profile.ProfileMarkovModel.ProfileRelevantData
-import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel
-import edu.ie3.simona.model.participant.markov.random.RandomMarkovModel.RandomRelevantData
-import edu.ie3.simona.model.participant.markov.{
- MarkovModel,
- MarkovModelBehaviour,
-}
-import org.apache.pekko.actor.{ActorRef, Props}
-
-object MarkovAgent {
- def props(
- scheduler: ActorRef,
- initStateData: ParticipantInitializeStateData[
- MarkovInput,
- MarkovRuntimeConfig,
- ApparentPower,
- ],
- listener: Iterable[ActorRef],
- ): Props =
- MarkovModelBehaviour(initStateData.modelConfig.modelBehaviour) match {
- case MarkovModelBehaviour.PROFILE =>
- Props(new ProfileMarkovAgent(scheduler, initStateData, listener))
- case unsupported =>
- throw new IllegalArgumentException(
- s"The markov agent behaviour '$unsupported' is currently not supported."
- )
- }
-
- final class ProfileMarkovAgent(
- scheduler: ActorRef,
- initStateData: ParticipantInitializeStateData[
- MarkovInput,
- MarkovRuntimeConfig,
- ApparentPower,
- ],
- override val listener: Iterable[ActorRef],
- ) extends MarkovAgent[
- ProfileRelevantData,
- ProfileMarkovModel,
- ](scheduler, initStateData, listener)
- with ProfileMarkovAgentFundamentals
-}
-
-/** Creating a markov agent
- *
- * @param scheduler
- * Actor reference of the scheduler
- * @param listener
- * List of listeners interested in results
- */
-abstract class MarkovAgent[MD <: MarkovRelevantData, MM <: MarkovModel[MD]](
- scheduler: ActorRef,
- initStateData: ParticipantInitializeStateData[
- MarkovInput,
- MarkovRuntimeConfig,
- ApparentPower,
- ],
- override val listener: Iterable[ActorRef],
-) extends ParticipantAgent[
- ApparentPower,
- MD,
- ConstantState.type,
- ParticipantStateData[ApparentPower],
- MarkovInput,
- MarkovRuntimeConfig,
- MM,
- ](scheduler, initStateData)
- with MarkovAgentFundamentals[MD, MM] {
-}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala b/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
index 0e557be074..8b76d10fc6 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/CalcRelevantData.scala
@@ -24,5 +24,4 @@ object CalcRelevantData {
*/
trait LoadRelevantData extends CalcRelevantData
- case class MarkovRelevantData()
}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModel.scala
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
new file mode 100644
index 0000000000..af96f4de23
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -0,0 +1,5 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
\ No newline at end of file
diff --git a/src/main/resources/markov/appliances/average_hh.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/average_hh.csv
similarity index 100%
rename from src/main/resources/markov/appliances/average_hh.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/average_hh.csv
diff --git a/src/main/resources/markov/appliances/by_income.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_income.csv
similarity index 100%
rename from src/main/resources/markov/appliances/by_income.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_income.csv
diff --git a/src/main/resources/markov/appliances/by_inhabitants.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_inhabitants.csv
similarity index 100%
rename from src/main/resources/markov/appliances/by_inhabitants.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_inhabitants.csv
diff --git a/src/main/resources/markov/appliances/by_type.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_type.csv
similarity index 100%
rename from src/main/resources/markov/appliances/by_type.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_type.csv
diff --git a/src/main/resources/markov/appliances/load_ts.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/load_ts.csv
similarity index 100%
rename from src/main/resources/markov/appliances/load_ts.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/load_ts.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/dish_washer.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dish_washer.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/dish_washer.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dish_washer.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/dryer.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dryer.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/dryer.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dryer.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/freezer.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/freezer.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/freezer.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/freezer.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/fridge.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/fridge.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/fridge.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/fridge.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/lighting.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/lighting.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/lighting.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/lighting.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/pc.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/pc.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/pc.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/pc.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/stove.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/stove.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/stove.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/stove.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/telecommunication.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/telecommunication.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/telecommunication.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/telecommunication.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/television.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/television.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/television.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/television.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/video_recorder.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/video_recorder.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/video_recorder.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/video_recorder.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/washing_machine.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/washing_machine.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/washing_machine.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/washing_machine.csv
diff --git a/src/main/resources/markov/probabilities/switch_on_probabilities/water_heating.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/water_heating.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/switch_on_probabilities/water_heating.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/water_heating.csv
diff --git a/src/main/resources/markov/probabilities/usage_probabilities/usage_probabilities.csv b/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/usage_probabilities/usage_probabilities.csv
similarity index 100%
rename from src/main/resources/markov/probabilities/usage_probabilities/usage_probabilities.csv
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/usage_probabilities/usage_probabilities.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
deleted file mode 100644
index d0fae5e5cb..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovHousehouldkey.Scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * © 2020. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.model.participant.load.profile
-
-import java.time.ZonedDateTime
-import edu.ie3.datamodel.exceptions.ParsingException
-import edu.ie3.datamodel.models.profile.StandardLoadProfile
-import edu.ie3.simona.model.participant.load
-import edu.ie3.simona.model.participant.load.{DayType, profile}
-
-/** A key describing a MarkovHousehold profile, consisting of a consumer type, a season and a
- * day type. It is used to store MarkovHousehold profile values for a single type.
- *
- * @param standardMarkovHousehold
- * a consumer type
- * @param season
- * a season
- * @param dayType
- * a day type
- */
-final case class MarkovHouseholdKey(
- standardMarkovHousehold: StandardLoadProfile,
- season: Season.Value,
- dayType: DayType.Value,
-)
-
-case object MarkovHouseholdKey {
-
- /** Creates a MarkovHousehold key from a given csv header, e.g. "g0WiSu"
- *
- * @param headerKey
- * the header
- * @return
- * a MarkovHousehold key
- */
- def apply(headerKey: String): MarkovHouseholdKey = {
- val regex = "([a-z][0-9])([A-Z][a-z])([A-Z][a-z])".r
-
- headerKey match {
- case regex(markovHouseholdKey, seasonKey, dayTypeKey) =>
- MarkovHouseholdKey(markovHouseholdKey, seasonKey, dayTypeKey)
- case _ =>
- throw new RuntimeException(
- s"Provided MarkovHousehold profile header key $headerKey is malformed. It has to be of the form ${regex.pattern} e.g. 'g0WiSu'."
- )
- }
- }
-
- /** Creates a MarkovHousehold key from three Strings describing a MarkovHousehold
- *
- * @param MarkovHousehold
- * Key describing the MarkovHousehold
- * @param season
- * Key describing the season
- * @param dayType
- * Key describing the day type
- * @return
- * a MarkovHousehold key
- */
- def apply(
- MarkovHousehold: String,
- season: String,
- dayType: String,
- ): MarkovHouseholdKey = {
- try {
- new MarkovHouseholdKey(
- StandardLoadProfile.parse(MarkovHousehold),
- Season(season),
- DayType(dayType),
- )
- } catch {
- case e: ParsingException =>
- throw new IllegalArgumentException(
- s"Cannot parse '$MarkovHousehold' to a now StandardLoadProfile.",
- e,
- )
- }
- }
-
- /** Creates a MarkovHousehold key from a consumer type value and a ZonedDateTime
- * object
- *
- * @param MarkovHousehold
- * The standard MarkovHousehold
- * @param time
- * The time
- * @return
- * a MarkovHousehold key
- */
- def apply(
- MarkovHousehold: StandardLoadProfile,
- time: ZonedDateTime,
- ): MarkovHouseholdKey = {
- new MarkovHouseholdKey(
- MarkovHousehold,
- profile.Season(time),
- load.DayType(time.getDayOfWeek),
- )
- }
-}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
deleted file mode 100644
index c8faaceb33..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/markov/MarkovModelBehaviour.Scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.model.participant.load
-
-import edu.ie3.simona.util.ParsableEnumeration
-
-/** Enumeration to describe all eligible load model behaviours
- */
-case object MarkovModelBehaviour extends ParsableEnumeration {
- val HOUSEHOLD: Value = Value("household")
-}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala
deleted file mode 100644
index 23cfb4e90e..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/markov/Season.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-import java.time.LocalDateTime
-import java.time.temporal.ChronoUnit
-
-object Season extends Enumeration {
- type Season = Value
- val SPRING, SUMMER, AUTUMN, WINTER = Value
-
- def getSeason(dateTime: LocalDateTime): Season = {
- val month = dateTime.getMonthValue
- if (month >= 3 && month <= 5) SPRING
- else if (month >= 6 && month <= 8) SUMMER
- else if (month >= 9 && month <= 11) AUTUMN
- else WINTER
- }
-}
-
-object DayType extends Enumeration {
- type DayType = Value
- val WEEKDAY, SATURDAY, SUNDAY = Value
-
- def getDayType(dateTime: LocalDateTime): DayType = {
- val weekday = dateTime.getDayOfWeek.getValue
- if (weekday < 6) WEEKDAY
- else if (weekday == 6) SATURDAY
- else SUNDAY
- }
-}
-
-case class TimeInterval(start: LocalDateTime, end: LocalDateTime) {
- def isWithin(time: LocalDateTime): Boolean = {
- start.compareTo(time) <= 0 && end.compareTo(time) > 0
- }
-}
-
-object TimeInterval {
- def getOperationInterval(start: LocalDateTime, duration: Long): TimeInterval = {
- val end = start.plus(duration, ChronoUnit.MILLIS)
- TimeInterval(start, end)
- }
-}
-
-object Main extends App {
- val now = LocalDateTime.now()
-
- println("Current Season: " + Season.getSeason(now))
- println("Current Day Type: " + DayType.getDayType(now))
-
- val operationInterval = TimeInterval.getOperationInterval(now, 3600000) // 1 hour duration
- println("Is within operation interval? " + operationInterval.isWithin(now))
-}
-
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala b/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
deleted file mode 100644
index c97cdbdd67..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/markov/TypeDay.Scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.model.participant.Markov.profile
-
-import java.time.ZonedDateTime
-
-import edu.ie3.util.TimeUtil
-
-// needs to be imported for max function
-import scala.math.Ordering.Double.IeeeOrdering
-
-/** Stores a slice of Markov Household data that comprises a whole day (96 quarter
- * hours). The data describes a typical day that can unequivocally be
- * identified by a [[MarkovHouseholdKey]].
- *
- * @param values
- * 96 quarter-hour values for this Markov Household,
- */
-final case class TypeDayProfile(private val values: Array[Double]) {
- if (values.length != 96)
- throw new IllegalArgumentException(
- "You may only instantiate type day parameters with 96 values."
- )
-
- /** Returns a value for given time. If time is not a 15 min step, it is
- * rounded up to the next 15 min slice.
- *
- * @param time
- * the time
- * @return
- * the load value
- */
- def getQuarterHourEnergy(time: ZonedDateTime): Double = {
- val quartH = TimeUtil.withDefaults.getQuarterHourOfDay(time)
- values(quartH)
- }
-
- /** @return
- * the maximum value of this MarkovHousehold
- */
- def getMaxValue: Double = values.maxOption.getOrElse(Double.PositiveInfinity)
-}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala b/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
deleted file mode 100644
index 375651aca6..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/markov/appliance_category.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object ApplianceCategory extends Enumeration {
- type ApplianceCategory = Value
- val DISH_WASHER, WASHING_MACHINE, DRYER, STOVE, FRIDGE, FREEZER, TELEVISION, VIDEO_RECORDER, PC, TELECOMMUNICATION, LIGHTING, WATER_HEATING, OTHER_LOAD = Value
-}
-
diff --git a/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala b/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
new file mode 100644
index 0000000000..af96f4de23
--- /dev/null
+++ b/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
@@ -0,0 +1,5 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
\ No newline at end of file
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
new file mode 100644
index 0000000000..af96f4de23
--- /dev/null
+++ b/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
@@ -0,0 +1,5 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
\ No newline at end of file
From 27bf1b5f677c3d688936acd8b2dd2c0633542c97 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 3 Apr 2024 17:58:10 +0200
Subject: [PATCH 09/36] Fixed Errors
---
.../agent/participant/load/markov/MarkovAgent.scala | 2 +-
.../load/markov/MarkovAgentFundamentals.Scala | 9 ++++-----
.../model/participant/load/markov/MarkovData.scala | 8 ++++++++
.../model/participant/load/markov/MarkovModel.scala | 8 +++++++-
4 files changed, 20 insertions(+), 7 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
index f5b75cbc4b..ee729b39f7 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
@@ -8,7 +8,7 @@ package edu.ie3.simona.agent.participant.load.markov
import edu.ie3.simona.agent.participant.ParticipantAgent
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.load.LoadAgentFundamentals
+import edu.ie3.simona.agent.participant.load.markov.MarkovAgentFundamentals
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
index 1e027c04e1..601cfbb03b 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
@@ -28,11 +28,10 @@ import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.exceptions.agent.InconsistentStateException
import edu.ie3.simona.model.SystemComponent
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.MarkovModel.MarkovRelevantData
-import edu.ie3.simona.model.participant.load.markov{
- MarkovModel,
- MarkovData,
-}
+import edu.ie3.simona.model.participant.load.markov.MarkovModel.MarkovRelevantData
+import edu.ie3.simona.model.participant.load.markov.MarkovModel
+import edu.ie3.simona.model.participant.load.markov.MarkovData
+
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{
FlexRequest,
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
index e69de29bb2..fdf6a18fe5 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
@@ -0,0 +1,8 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+abstract class MarkovData
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index af96f4de23..761abbb0ba 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -2,4 +2,10 @@
* © 2024. TU Dortmund University,
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
- */
\ No newline at end of file
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+
+import edu.ie3.simona.model.participant.load.markov.MarkovData
+
+abstract class MarkovModel
\ No newline at end of file
From 3ff337b7bb104645ce1aa81b1a249c5c1124363d Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Thu, 4 Apr 2024 17:01:53 +0200
Subject: [PATCH 10/36] MarkovAgent Basics Fixed
---
.../load/LoadAgentFundamentals.scala | 3 +-
.../participant/load/markov/MarkovAgent.scala | 58 ++++++++++++-------
.../load/markov/MarkovAgentFundamentals.Scala | 33 ++++-------
.../edu/ie3/simona/config/SimonaConfig.scala | 2 +-
.../participant/load/LoadModelBehaviour.scala | 1 +
.../participant/load/markov/MarkovData.scala | 8 ---
.../participant/load/markov/MarkovModel.scala | 31 +++++++++-
.../MarkovAgentModelCalculationSpec.scala | 5 --
.../participant/load/MarkovModelSpec.scala | 5 --
9 files changed, 82 insertions(+), 64 deletions(-)
delete mode 100644 src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
delete mode 100644 src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
delete mode 100644 src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgentFundamentals.scala
index 79df65997c..01e1639be8 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgentFundamentals.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgentFundamentals.scala
@@ -511,13 +511,14 @@ object LoadAgentFundamentals {
operationInterval: OperationInterval,
modelConfig: LoadRuntimeConfig,
reference: LoadReference,
- ): RandomLoadModel =
+ ): RandomLoadModel = {
RandomLoadModel(
inputModel,
operationInterval,
modelConfig.scaling,
reference,
)
+ }
override protected def createCalcRelevantData(
baseStateData: ParticipantModelBaseStateData[
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
index ee729b39f7..99456f64fd 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
@@ -6,35 +6,52 @@
package edu.ie3.simona.agent.participant.load.markov
+import edu.ie3.datamodel.models.input.system.LoadInput
+import edu.ie3.datamodel.models.result.system.SystemParticipantResult
import edu.ie3.simona.agent.participant.ParticipantAgent
+import edu.ie3.simona.agent.participant.data.Data
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.load.markov.MarkovAgentFundamentals
-import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
+import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
+import edu.ie3.simona.agent.participant.statedata.{BaseStateData, ParticipantStateData}
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
-import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
+import edu.ie3.simona.agent.state.AgentState
+import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
+import edu.ie3.simona.event.notifier.NotifierConfig
+import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.markov.MarkovModel
-import edu.ie3.simona.model.participant.load.markov.MarkovData
+import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
+import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
+import edu.ie3.util.scala.quantities.ReactivePower
+import org.apache.pekko.actor.{ActorRef, FSM, Props, typed}
+import squants.{Dimensionless, Power}
+
+import java.time.ZonedDateTime
+import java.util.UUID
+
-import org.apache.pekko.actor.{ActorRef, Props}
object MarkovAgent {
def props(
scheduler: ActorRef,
initStateData: ParticipantInitializeStateData[
- MarkovData,
- MarkovRuntimeConfig,
+ LoadInput,
+ LoadRuntimeConfig,
ApparentPower,
],
listener: Iterable[ActorRef],
): Props =
Props(
- new MarkovAgent
- scheduler,
- initStatedata,
- listener,
- )
- }
+ new MarkovAgent(
+ scheduler,
+ initStateData: ParticipantInitializeStateData[
+ LoadInput,
+ LoadRuntimeConfig,
+ ApparentPower,
+ ],
+ listener,
+ )
+ )
+}
/** Creating a load agent
*
@@ -43,11 +60,11 @@ object MarkovAgent {
* @param listener
* List of listeners interested in results
*/
-class LoadAgent(
+class MarkovAgent(
scheduler: ActorRef,
initStateData: ParticipantInitializeStateData[
- MarkovData,
- MarkovRuntimeConfig,
+ LoadInput,
+ LoadRuntimeConfig,
ApparentPower,
],
override val listener: Iterable[ActorRef],
@@ -56,10 +73,11 @@ class LoadAgent(
MarkovRelevantData,
ConstantState.type,
ParticipantStateData[ApparentPower],
- MarkovData,
- MarkovRuntimeConfig,
+ LoadInput,
+ LoadRuntimeConfig,
+ MarkovModel,
](scheduler, initStateData)
- with MarkovAgentFundamentals{
+ with MarkovAgentFundamentals {
/*
* "Hey, SIMONA! What is handled in ParticipantAgent?"
* "Hey, dude! The following things are handled in ParticipantAgent:
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
index 601cfbb03b..e5865391b1 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
@@ -6,37 +6,26 @@
package edu.ie3.simona.agent.participant.load.markov
+import edu.ie3.datamodel.models.input.system.LoadInput
import edu.ie3.simona.agent.ValueStore
import edu.ie3.simona.agent.participant.ParticipantAgent.getAndCheckNodalVoltage
import edu.ie3.simona.agent.participant.ParticipantAgentFundamentals
-import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{
- ApparentPower,
- ZERO_POWER,
-}
+import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{ApparentPower, ZERO_POWER}
import edu.ie3.simona.agent.participant.data.Data.SecondaryData
import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
-import edu.ie3.simona.agent.participant.statedata.BaseStateData.{
- FlexControlledData,
- ParticipantModelBaseStateData,
-}
+import edu.ie3.simona.agent.participant.statedata.BaseStateData.{FlexControlledData, ParticipantModelBaseStateData}
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.InputModelContainer
import edu.ie3.simona.agent.state.AgentState
import edu.ie3.simona.agent.state.AgentState.Idle
-import edu.ie3.simona.config.SimonaConfig.MarkovRuntimeConfig
+import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.exceptions.agent.InconsistentStateException
import edu.ie3.simona.model.SystemComponent
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.markov.MarkovModel.MarkovRelevantData
-import edu.ie3.simona.model.participant.load.markov.MarkovModel
-import edu.ie3.simona.model.participant.load.markov.MarkovData
-
+import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
-import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{
- FlexRequest,
- FlexResponse,
-}
+import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{FlexRequest, FlexResponse}
import edu.ie3.simona.util.SimonaConstants
import edu.ie3.simona.util.TickUtil._
import edu.ie3.util.quantities.PowerSystemUnits.PU
@@ -59,8 +48,8 @@ protected trait MarkovAgentFundamentals
MarkovRelevantData,
ConstantState.type,
ParticipantStateData[ApparentPower],
- MarkovData,
- MarkovRuntimeConfig,
+ LoadInput,
+ LoadRuntimeConfig,
MarkovModel,
] {
this: MarkovAgent =>
@@ -94,8 +83,8 @@ protected trait MarkovAgentFundamentals
*/
override def determineModelBaseStateData(
- inputModel: InputModelContainer[MarkovData],
- modelConfig: MarkovRuntimeConfig,
+ inputModel: InputModelContainer[LoadInput],
+ modelConfig: LoadRuntimeConfig,
services: Iterable[SecondaryDataService[_ <: SecondaryData]],
simulationStartDate: ZonedDateTime,
simulationEndDate: ZonedDateTime,
@@ -293,7 +282,7 @@ override val calculateModelPowerFunc: (
dateTime: ZonedDateTime,
result: ApparentPower,
): SystemParticipantResult =
- new MarkovResult(
+ new LoadResult(
dateTime,
uuid,
result.p.toMegawatts.asMegaWatt,
diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
index 7e386465ef..c64c62ca3f 100644
--- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
+++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
@@ -2955,4 +2955,4 @@ object SimonaConfig {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/LoadModelBehaviour.scala b/src/main/scala/edu/ie3/simona/model/participant/load/LoadModelBehaviour.scala
index 0a9a057808..851a1d76c1 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/LoadModelBehaviour.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/LoadModelBehaviour.scala
@@ -14,4 +14,5 @@ case object LoadModelBehaviour extends ParsableEnumeration {
val FIX: Value = Value("fix")
val PROFILE: Value = Value("profile")
val RANDOM: Value = Value("random")
+ val MARKOV: Value = Value("markov")
}
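With the new key in place, a configured behaviour string of "markov" can be resolved and dispatched. A hedged sketch, assuming ParsableEnumeration builds on scala.Enumeration so that withName is available:

    // Resolve the behaviour key from a runtime config value (sketch, not project code).
    val behaviour = LoadModelBehaviour.withName("markov")

    behaviour match {
      case LoadModelBehaviour.MARKOV => // hand off to the markov-based load agent
      case _                         => // fix, profile and random keep their existing handling
    }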
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
deleted file mode 100644
index fdf6a18fe5..0000000000
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovData.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-/*
- * © 2020. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.model.participant.load.markov
-abstract class MarkovData
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index 761abbb0ba..ccdae22d4d 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -6,6 +6,33 @@
package edu.ie3.simona.model.participant.load.markov
-import edu.ie3.simona.model.participant.load.markov.MarkovData
+import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
+import edu.ie3.simona.model.participant.ModelState.ConstantState
+import edu.ie3.simona.model.participant.control.QControl
+import edu.ie3.simona.model.participant.{CalcRelevantData, SystemParticipant}
+import edu.ie3.util.scala.OperationInterval
+import squants.Power
-abstract class MarkovModel
\ No newline at end of file
+import java.util.UUID
+
+abstract class MarkovModel(
+ uuid: UUID,
+ id: String,
+ operationInterval: OperationInterval,
+ qControl: QControl,
+ sRated: Power,
+ cosPhiRated: Double,
+) extends SystemParticipant[
+ MarkovRelevantData,
+ ApparentPower,
+ ConstantState.type,
+ ](
+ uuid = ???,
+ id = ???,
+ operationInterval = ???,
+ qControl = ???,
+ sRated = ???,
+ cosPhiRated = ???,
+ )
+
+class MarkovRelevantData extends CalcRelevantData
diff --git a/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala b/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
deleted file mode 100644
index af96f4de23..0000000000
--- a/src/test/scala/edu/ie3/simona/agent/participant/MarkovAgentModelCalculationSpec.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
\ No newline at end of file
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
deleted file mode 100644
index af96f4de23..0000000000
--- a/src/test/scala/edu/ie3/simona/model/participant/load/MarkovModelSpec.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
\ No newline at end of file
From 15dac881b87efd8a6bc122ac506a18643c10f5a7 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Thu, 4 Apr 2024 17:21:17 +0200
Subject: [PATCH 11/36] MarkovAgent Basics Fixed
---
.../agent/participant/load/markov/MarkovAgent.scala | 12 ++++++++----
.../load/markov/MarkovAgentFundamentals.Scala | 3 ---
2 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
index 99456f64fd..ecb0f7a1bc 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
@@ -12,14 +12,20 @@ import edu.ie3.simona.agent.participant.ParticipantAgent
import edu.ie3.simona.agent.participant.data.Data
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
-import edu.ie3.simona.agent.participant.statedata.{BaseStateData, ParticipantStateData}
+import edu.ie3.simona.agent.participant.statedata.{
+ BaseStateData,
+ ParticipantStateData,
+}
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
import edu.ie3.simona.agent.state.AgentState
import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
+import edu.ie3.simona.model.participant.load.markov.{
+ MarkovModel,
+ MarkovRelevantData,
+}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
import edu.ie3.util.scala.quantities.ReactivePower
import org.apache.pekko.actor.{ActorRef, FSM, Props, typed}
@@ -28,8 +34,6 @@ import squants.{Dimensionless, Power}
import java.time.ZonedDateTime
import java.util.UUID
-
-
object MarkovAgent {
def props(
scheduler: ActorRef,
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
index e5865391b1..064039a348 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
@@ -127,9 +127,6 @@ override protected def createInitialState(
tick: Long,
): MarkovRelevantData = {
- MarkovRelevantData(
- None,
- )
}
/** Handle an active power change by flex control.
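Removing the constructor call leaves createCalcRelevantData with an empty body, which no longer satisfies the declared MarkovRelevantData return type. A minimal placeholder consistent with the parameterless MarkovRelevantData class introduced in the previous patch (a sketch, not the final modelling; the real override additionally receives the base state data):

    // Sketch (assumption): MarkovRelevantData carries no fields yet, so a bare
    // instance is enough to satisfy the declared return type.
    def createCalcRelevantData(tick: Long): MarkovRelevantData =
      new MarkovRelevantData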
From 9846f82c1c8a00005770d4c075c6cdd9b3d13d76 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Thu, 11 Apr 2024 13:49:47 +0200
Subject: [PATCH 12/36] SwitchOnProbabilityKey for MarkovParamStore
---
.../load/markov}/appliances/average_hh.csv | 0
.../load/markov}/appliances/by_income.csv | 0
.../markov}/appliances/by_inhabitants.csv | 0
.../load/markov}/appliances/by_type.csv | 0
.../load/markov}/appliances/load_ts.csv | 0
.../switch_on_probabilities/dish_washer.csv | 0
.../switch_on_probabilities/dryer.csv | 0
.../switch_on_probabilities/freezer.csv | 0
.../switch_on_probabilities/fridge.csv | 0
.../switch_on_probabilities/lighting.csv | 0
.../switch_on_probabilities/pc.csv | 0
.../switch_on_probabilities/stove.csv | 0
.../telecommunication.csv | 0
.../switch_on_probabilities/television.csv | 0
.../video_recorder.csv | 0
.../washing_machine.csv | 0
.../switch_on_probabilities/water_heating.csv | 0
.../usage_probabilities.csv | 0
.../load/markov/MarkovParamStore.scala | 25 ++++++++
.../load/markov/MarkovParameters.scala | 31 +++++++++
.../load/markov/SwitchOnProbabilityKey.scala | 63 +++++++++++++++++++
.../load/random/RandomLoadParamStore.scala | 2 +-
22 files changed, 120 insertions(+), 1 deletion(-)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/appliances/average_hh.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/appliances/by_income.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/appliances/by_inhabitants.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/appliances/by_type.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/appliances/load_ts.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/dish_washer.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/dryer.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/freezer.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/fridge.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/lighting.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/pc.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/stove.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/telecommunication.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/television.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/video_recorder.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/washing_machine.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/switch_on_probabilities/water_heating.csv (100%)
rename src/main/{scala/edu/ie3/simona/model/participant/load/markov/resources => resources/load/markov}/probabilities/usage_probabilities/usage_probabilities.csv (100%)
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala
create mode 100644 src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/average_hh.csv b/src/main/resources/load/markov/appliances/average_hh.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/average_hh.csv
rename to src/main/resources/load/markov/appliances/average_hh.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_income.csv b/src/main/resources/load/markov/appliances/by_income.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_income.csv
rename to src/main/resources/load/markov/appliances/by_income.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_inhabitants.csv b/src/main/resources/load/markov/appliances/by_inhabitants.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_inhabitants.csv
rename to src/main/resources/load/markov/appliances/by_inhabitants.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_type.csv b/src/main/resources/load/markov/appliances/by_type.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/by_type.csv
rename to src/main/resources/load/markov/appliances/by_type.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/load_ts.csv b/src/main/resources/load/markov/appliances/load_ts.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/appliances/load_ts.csv
rename to src/main/resources/load/markov/appliances/load_ts.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dish_washer.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dish_washer.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dryer.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dryer.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/dryer.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/dryer.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/freezer.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/freezer.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/freezer.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/freezer.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/fridge.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/fridge.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/fridge.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/fridge.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/lighting.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/lighting.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/lighting.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/lighting.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/pc.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/pc.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/pc.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/pc.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/stove.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/stove.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/stove.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/stove.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/telecommunication.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/telecommunication.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/telecommunication.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/telecommunication.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/television.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/television.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/television.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/television.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/video_recorder.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/video_recorder.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/video_recorder.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/video_recorder.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/washing_machine.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/washing_machine.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/washing_machine.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/washing_machine.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/water_heating.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/water_heating.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/switch_on_probabilities/water_heating.csv
rename to src/main/resources/load/markov/probabilities/switch_on_probabilities/water_heating.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/usage_probabilities/usage_probabilities.csv b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
similarity index 100%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/resources/probabilities/usage_probabilities/usage_probabilities.csv
rename to src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
new file mode 100644
index 0000000000..6cf79192f7
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -0,0 +1,25 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+
+import java.io.{InputStreamReader, Reader}
+import java.time.{Duration, ZonedDateTime}
+import com.typesafe.scalalogging.LazyLogging
+import edu.ie3.simona.exceptions.FileIOException
+import edu.ie3.simona.model.participant.load.DayType // Use for Markov too
+
+import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey
+import org.apache.commons.csv.{CSVFormat, CSVRecord}
+
+import scala.jdk.CollectionConverters._
+
+// appliances
+
+
+
+
+// probabilities
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala
new file mode 100644
index 0000000000..3ecc0b8a22
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala
@@ -0,0 +1,31 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+
+import edu.ie3.simona.util.ParsableEnumeration
+
+final case class MarkovParamStore( )
+
+/** Enumeration to list all possible parameters
+ */
+case object MarkovParameters extends ParsableEnumeration {
+ //val K: Value = Value("k")
+
+ val DISH_WASHER: Value = Value("dish_washer")
+ val WASHING_MACHINE: Value = Value("washing_machine")
+ val DRYER: Value = Value("dryer")
+ val STOVE: Value = Value("stove")
+ val FRIDGE: Value = Value("fridge")
+ val FREEZER: Value = Value("freezer")
+ val TELEVISION: Value = Value("television")
+ val VIDEO_RECORDER: Value = Value("video_recorder")
+ val PC: Value = Value("pc")
+ val TELECOMMUNICATION: Value = Value("telecommunication")
+ val LIGHTING: Value = Value("lighting")
+ val WATER_HEATING: Value = Value("water_heating")
+ val OTHER_LOAD: Value = Value("other_load")
+}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
new file mode 100644
index 0000000000..7f048b7f9e
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
@@ -0,0 +1,63 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+
+import java.time.LocalDateTime
+
+object SwitchOnProbabilityKey {
+ sealed trait Season
+ object Season {
+ case object Spring extends Season
+ case object Summer extends Season
+ case object Autumn extends Season
+ case object Winter extends Season
+ }
+
+ sealed trait DayType
+ object DayType {
+ case object Weekday extends DayType
+ case object Weekend extends DayType
+ }
+
+ case class SwitchOnProbabilityKey(season: Season, dayType: DayType, quarterlyHourOfDay: Int)
+
+ def extractFromDateTime(dateTime: LocalDateTime): SwitchOnProbabilityKey = {
+ val season = getSeason(dateTime)
+ val dayType = getDayType(dateTime)
+ val quarterlyHourOfDay = getQuarterlyHourOfDay(dateTime)
+ SwitchOnProbabilityKey(season, dayType, quarterlyHourOfDay)
+ }
+
+ def getSeason(dateTime: LocalDateTime): Season = {
+ val month = dateTime.getMonthValue
+ if (month >= 3 && month <= 5) Season.Spring
+ else if (month >= 6 && month <= 8) Season.Summer
+ else if (month >= 9 && month <= 11) Season.Autumn
+ else Season.Winter
+ }
+
+ def getDayType(dateTime: LocalDateTime): DayType = {
+ val dayOfWeek = dateTime.getDayOfWeek.getValue
+ if (dayOfWeek >= 1 && dayOfWeek <= 5) DayType.Weekday
+ else DayType.Weekend
+ }
+
+ def getQuarterlyHourOfDay(dateTime: LocalDateTime): Int = {
+ val hour = dateTime.getHour
+ val minute = dateTime.getMinute
+ val quarter = minute / 15
+ hour * 4 + quarter
+ }
+
+ def getAll: Seq[SwitchOnProbabilityKey] = {
+ for {
+ season <- Seq(Season.Spring, Season.Summer, Season.Autumn, Season.Winter)
+ dayType <- Seq(DayType.Weekday, DayType.Weekend)
+ quarterlyHourOfDay <- 0 until (4 * 24)
+ } yield SwitchOnProbabilityKey(season, dayType, quarterlyHourOfDay)
+ }
+}
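A short usage sketch for the new key extraction, with values checked against the logic above (2024-07-06 is a Saturday in July; 18:40 falls into quarter hour 18 * 4 + 2 = 74):

    import java.time.LocalDateTime
    import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey
    import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey.{DayType, Season}

    val key = SwitchOnProbabilityKey.extractFromDateTime(LocalDateTime.of(2024, 7, 6, 18, 40))
    // key == SwitchOnProbabilityKey.SwitchOnProbabilityKey(Season.Summer, DayType.Weekend, 74)

    // Full key space: 4 seasons x 2 day types x 96 quarter hours = 768 keys.
    assert(SwitchOnProbabilityKey.getAll.size == 768)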
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
index 36635ae940..5f20f6b131 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
@@ -83,7 +83,7 @@ case object RandomLoadParamStore extends LazyLogging {
* @param reader
* a reader that is providing random load parameters from a CSV file
*/
- def initializeDayTypeValues(
+ def initializeDayTypeValues(
reader: Reader
): Map[DayType.Value, TypeDayParameters] = {
val parser = csvParser.parse(reader)
From 841e62b558a65e96df8c9029cacfc53a02149fde Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Fri, 12 Apr 2024 12:28:52 +0200
Subject: [PATCH 13/36] Usage_Probabilities for MarkovParamStore
---
.../participant/load/markov/MarkovAgent.scala | 131 ++++++++++++++++--
.../load/markov/MarkovAgentFundamentals.Scala | 17 +--
...rameters.scala => ApplianceCategory.scala} | 7 +-
.../load/markov/MarkovParamStore.scala | 55 +++++---
4 files changed, 162 insertions(+), 48 deletions(-)
rename src/main/scala/edu/ie3/simona/model/participant/load/markov/{MarkovParameters.scala => ApplianceCategory.scala} (85%)
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
index ecb0f7a1bc..ea8b3edea8 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
@@ -3,32 +3,23 @@
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
-
+/**
package edu.ie3.simona.agent.participant.load.markov
+import edu.ie3.simona.agent.participant.load.markov.MarkovAgentFundamentals
import edu.ie3.datamodel.models.input.system.LoadInput
import edu.ie3.datamodel.models.result.system.SystemParticipantResult
import edu.ie3.simona.agent.participant.ParticipantAgent
-import edu.ie3.simona.agent.participant.data.Data
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
-import edu.ie3.simona.agent.participant.statedata.{
- BaseStateData,
- ParticipantStateData,
-}
+import edu.ie3.simona.agent.participant.statedata.{BaseStateData, ParticipantStateData}
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
import edu.ie3.simona.agent.state.AgentState
import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
-import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.markov.{
- MarkovModel,
- MarkovRelevantData,
-}
-import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
+import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
import edu.ie3.util.scala.quantities.ReactivePower
-import org.apache.pekko.actor.{ActorRef, FSM, Props, typed}
+import org.apache.pekko.actor.{ActorRef, FSM, Props}
import squants.{Dimensionless, Power}
import java.time.ZonedDateTime
@@ -91,4 +82,116 @@ class MarkovAgent(
* 4) Performing model calculations
* "
*/
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ override protected val calculateModelPowerFunc: (Long, BaseStateData.ParticipantModelBaseStateData[ApparentPower, MarkovRelevantData, ModelState.ConstantState.type, MarkovModel], ModelState.ConstantState.type, Dimensionless) => ApparentPower = ???
+
+ /** Abstractly calculate the power output of the participant utilising
+ * secondary data. However, it might appear, that not the complete set of
+ * secondary data is available for the given tick. This might especially be
+ * true, if the actor has been additionally activated. This method thereby
+ * has to try and fill up missing data with the last known data, as this is
+ * still supposed to be valid. The secondary data therefore is put to the
+ * calculation relevant data store. The next state is [[Idle]], sending a
+ * [[edu.ie3.simona.ontology.messages.SchedulerMessage.Completion]] to
+ * scheduler and using update result values.
+ * Actual implementation
+ * can be found in each participant's fundamentals.
+ *
+ * @param baseStateData
+ * The base state data with collected secondary data
+ * @param lastModelState
+ * The current model state, before applying changes by externally received
+ * data
+ * @param currentTick
+ * Tick, the trigger belongs to
+ * @param scheduler
+ * [[ActorRef]] to the scheduler in the simulation
+ * @return
+ * [[Idle]] with updated result values
+ */
+ override def calculatePowerWithSecondaryDataAndGoToIdle(baseStateData: BaseStateData.ParticipantModelBaseStateData[ApparentPower, MarkovRelevantData, ModelState.ConstantState.type, MarkovModel], lastModelState: ModelState.ConstantState.type, currentTick: Long, scheduler: ActorRef): FSM.State[AgentState, ParticipantStateData[ApparentPower]] = ???
+
+ override protected def createInitialState(baseStateData: BaseStateData.ParticipantModelBaseStateData[ApparentPower, MarkovRelevantData, ModelState.ConstantState.type, MarkovModel]): ModelState.ConstantState.type = ???
+
+ override protected def createCalcRelevantData(baseStateData: BaseStateData.ParticipantModelBaseStateData[ApparentPower, MarkovRelevantData, ModelState.ConstantState.type, MarkovModel], tick: Long): MarkovRelevantData = ???
+
+ /** Handle an active power change by flex control.
+ *
+ * @param tick
+ * Tick, in which control is issued
+ * @param baseStateData
+ * Base state data of the agent
+ * @param data
+ * Calculation relevant data
+ * @param lastState
+ * Last known model state
+ * @param setPower
+ * Setpoint active power
+ * @return
+ * Updated model state, a result model and a [[FlexChangeIndicator]]
+ */
+ override def handleControlledPowerChange(tick: Long, baseStateData: BaseStateData.ParticipantModelBaseStateData[ApparentPower, MarkovRelevantData, ModelState.ConstantState.type, MarkovModel], data: MarkovRelevantData, lastState: ModelState.ConstantState.type, setPower: Power): (ModelState.ConstantState.type, ApparentPower, FlexChangeIndicator) = ???
+
+ /** Abstract method to build the calculation model from input
+ *
+ * @param inputModel
+ * Input model description
+ * @param modelConfig
+ * Configuration for the model
+ * @param simulationStartDate
+ * Wall clock time of first instant in simulation
+ * @param simulationEndDate
+ * Wall clock time of last instant in simulation
+ * @return
+ */
+ override def buildModel(inputModel: ParticipantStateData.InputModelContainer[LoadInput], modelConfig: LoadRuntimeConfig, simulationStartDate: ZonedDateTime, simulationEndDate: ZonedDateTime): MarkovModel = ???
+
+ /** Update the last known model state with the given external, relevant data
+ *
+ * @param tick
+ * Tick to update state for
+ * @param modelState
+ * Last known model state
+ * @param calcRelevantData
+ * Data, relevant for calculation
+ * @param nodalVoltage
+ * Current nodal voltage of the agent
+ * @param model
+ * Model for calculation
+ * @return
+ * The updated state at given tick under consideration of calculation
+ * relevant data
+ */
+ override protected def updateState(tick: Long, modelState: ModelState.ConstantState.type, calcRelevantData: MarkovRelevantData, nodalVoltage: Dimensionless, model: MarkovModel): ModelState.ConstantState.type = ???
+
+ /** Determine the average result within the given tick window
+ *
+ * @param tickToResults
+ * Mapping from data tick to actual data
+ * @param windowStart
+ * First, included tick of the time window
+ * @param windowEnd
+ * Last, included tick of the time window
+ * @param activeToReactivePowerFuncOpt
+ * An Option on a function, that transfers the active into reactive power
+ * @return
+ * The averaged result
+ */
+ override def averageResults(tickToResults: Map[Long, ApparentPower], windowStart: Long, windowEnd: Long, activeToReactivePowerFuncOpt: Option[Power => ReactivePower]): ApparentPower = ???
+
+ /** Determines the correct result.
+ *
+ * @param uuid
+ * Unique identifier of the physical model
+ * @param dateTime
+ * Real world date of the result
+ * @param result
+ * The primary data to build a result model for
+ * @return
+ * The equivalent event
+ */
+ override protected def buildResult(uuid: UUID, dateTime: ZonedDateTime, result: ApparentPower): SystemParticipantResult = ???
}
+*/
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
index 064039a348..b5d13d95ad 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
@@ -3,11 +3,10 @@
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
-
+/**
package edu.ie3.simona.agent.participant.load.markov
import edu.ie3.datamodel.models.input.system.LoadInput
-import edu.ie3.simona.agent.ValueStore
import edu.ie3.simona.agent.participant.ParticipantAgent.getAndCheckNodalVoltage
import edu.ie3.simona.agent.participant.ParticipantAgentFundamentals
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{ApparentPower, ZERO_POWER}
@@ -21,25 +20,20 @@ import edu.ie3.simona.agent.state.AgentState.Idle
import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
import edu.ie3.simona.event.notifier.NotifierConfig
import edu.ie3.simona.exceptions.agent.InconsistentStateException
-import edu.ie3.simona.model.SystemComponent
+
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
-import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{FlexRequest, FlexResponse}
-import edu.ie3.simona.util.SimonaConstants
-import edu.ie3.simona.util.TickUtil._
-import edu.ie3.util.quantities.PowerSystemUnits.PU
+import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{FlexResponse}
+
import edu.ie3.util.quantities.QuantityUtils.RichQuantityDouble
-import edu.ie3.util.scala.OperationInterval
import edu.ie3.util.scala.quantities.ReactivePower
-import org.apache.pekko.actor.typed.scaladsl.adapter.ClassicActorRefOps
import org.apache.pekko.actor.typed.{ActorRef => TypedActorRef}
import org.apache.pekko.actor.{ActorRef, FSM}
-import squants.{Dimensionless, Each, Power}
+import squants.Dimensionless
import java.time.ZonedDateTime
import java.util.UUID
-import scala.collection.SortedSet
import scala.reflect.{ClassTag, classTag}
protected trait MarkovAgentFundamentals
@@ -310,3 +304,4 @@ override val calculateModelPowerFunc: (
model: MarkovModel,
): ModelState.ConstantState.type = modelState
}
+*/
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
similarity index 85%
rename from src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala
rename to src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
index 3ecc0b8a22..d5980284f1 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParameters.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
@@ -8,11 +8,12 @@ package edu.ie3.simona.model.participant.load.markov
import edu.ie3.simona.util.ParsableEnumeration
-final case class MarkovParamStore( )
+final case class ApplianceCategory()
-/** Enumeration to list all possible parameters
+/**
+ * Enumeration of all considered appliance types
*/
-case object MarkovParameters extends ParsableEnumeration {
+case object ApplianceCategory extends ParsableEnumeration {
//val K: Value = Value("k")
val DISH_WASHER: Value = Value("dish_washer")
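Since ApplianceCategory remains a ParsableEnumeration, its values can be iterated like any scala.Enumeration, e.g. to drive per-appliance loading of the switch-on probability files (the loading step itself is an assumption here):

    // Prints one line per appliance key, e.g. "dish_washer", "washing_machine", ...
    ApplianceCategory.values.foreach { category =>
      println(s"would load switch-on probabilities for '$category'")
    }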
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 6cf79192f7..14829eca91 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,25 +1,40 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-
-package edu.ie3.simona.model.participant.load.markov
-
-import java.io.{InputStreamReader, Reader}
-import java.time.{Duration, ZonedDateTime}
import com.typesafe.scalalogging.LazyLogging
-import edu.ie3.simona.exceptions.FileIOException
-import edu.ie3.simona.model.participant.load.DayType // Use for Markov too
-
-import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey
import org.apache.commons.csv.{CSVFormat, CSVRecord}
+import java.io.{FileInputStream, InputStreamReader, Reader}
+import scala.collection.mutable
import scala.jdk.CollectionConverters._
-// appliances
-
-
-
-
-// probabilities
+object MarkovParamStore extends LazyLogging {
+
+ def main(args: Array[String]): Unit = {
+ val filePath = "/load/markov/probabilities/usage_probabilities.csv"
+ val data = readCSV(filePath)
+ Test(data)
+ }
+
+ def readCSV(filePath: String): Map[String, Double] = {
+ val reader: Reader = new InputStreamReader(new FileInputStream(filePath))
+ val csvFormat: CSVFormat = CSVFormat.DEFAULT.withHeader().withDelimiter(';')
+ val csvRecords: Iterable[CSVRecord] = csvFormat.parse(reader).asScala
+ val dataMap = mutable.Map[String, Double]()
+
+ for (record <- csvRecords) {
+ val applianceCategory = record.get("appliance_category")
+ val usageProbabilityStr = record.get("usage_probability")
+ try {
+ val usageProbability = usageProbabilityStr.toDouble
+ dataMap.put(applianceCategory, usageProbability)
+ } catch {
+ case _: NumberFormatException => logger.warn(s"Invalid usage probability format for $applianceCategory: $usageProbabilityStr")
+ }
+ }
+ dataMap.toMap
+ }
+
+ def Test(data: Map[String, Double]): Unit = {
+ data.foreach { case (applianceCategory, usageProbability) =>
+ println(s"Appliance Category: $applianceCategory, Usage Probability: $usageProbability")
+ }
+ }
+}
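Note that readCSV above feeds a classpath-style path into FileInputStream, which only resolves real filesystem paths; the follow-up patch switches to the classloader instead. The distinction, as a short sketch:

    import java.io.{FileInputStream, InputStreamReader, Reader}

    object CsvReaders { // illustrative container, not part of the patch
      // Filesystem path, resolved against the working directory or given absolutely.
      def fromFile(path: String): Reader =
        new InputStreamReader(new FileInputStream(path))

      // Classpath resource inside the jar / resources folder (the approach adopted next).
      def fromClasspath(resource: String): Reader =
        new InputStreamReader(getClass.getResourceAsStream(resource))
    }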
From 75a5cc65c28a708ceca06a1a1fa719e849d6d941 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 15 Apr 2024 12:46:15 +0200
Subject: [PATCH 14/36] MarkovParamStore
---
.../load/markov/MarkovParamStore.scala | 54 +++++++++----------
1 file changed, 25 insertions(+), 29 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 14829eca91..c60d82093c 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,40 +1,36 @@
+package edu.ie3.simona.model.participant.load.markov
+
+import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
-import org.apache.commons.csv.{CSVFormat, CSVRecord}
+import org.apache.commons.csv.CSVFormat
-import java.io.{FileInputStream, InputStreamReader, Reader}
-import scala.collection.mutable
import scala.jdk.CollectionConverters._
object MarkovParamStore extends LazyLogging {
+ def loadDefaultApplianceProbabilities(): Map[String, Double] = {
+ val reader = getDefaultReader
+ val csvParser = CSVFormat.DEFAULT
+ .withDelimiter(';')
+ .withFirstRecordAsHeader()
+ .parse(reader)
- def main(args: Array[String]): Unit = {
- val filePath = "/load/markov/probabilities/usage_probabilities.csv"
- val data = readCSV(filePath)
- Test(data)
- }
-
- def readCSV(filePath: String): Map[String, Double] = {
- val reader: Reader = new InputStreamReader(new FileInputStream(filePath))
- val csvFormat: CSVFormat = CSVFormat.DEFAULT.withHeader().withDelimiter(';')
- val csvRecords: Iterable[CSVRecord] = csvFormat.parse(reader).asScala
- val dataMap = mutable.Map[String, Double]()
-
- for (record <- csvRecords) {
+ val records = csvParser.getRecords.asScala
+ val probabilitiesMap = records.map { record =>
val applianceCategory = record.get("appliance_category")
- val usageProbabilityStr = record.get("usage_probability")
- try {
- val usageProbability = usageProbabilityStr.toDouble
- dataMap.put(applianceCategory, usageProbability)
- } catch {
- case _: NumberFormatException => logger.warn(s"Invalid usage probability format for $applianceCategory: $usageProbabilityStr")
- }
- }
- dataMap.toMap
+ val usageProbability = record.get("usage_probability").toDouble
+ (applianceCategory, usageProbability)
+ }.toMap
+
+ reader.close()
+ probabilitiesMap
}
- def Test(data: Map[String, Double]): Unit = {
- data.foreach { case (applianceCategory, usageProbability) =>
- println(s"Appliance Category: $applianceCategory, Usage Probability: $usageProbability")
- }
+ private def getDefaultReader: Reader = {
+ logger.info(
+ //
+ )
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/probabilities/usage_probabilities/usage_probabilities.csv")
+ )
}
}
From 79ba101e6d0915f8d9c30deea5cf32eacb6d2c34 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 15 Apr 2024 12:48:12 +0200
Subject: [PATCH 15/36] MarkovParamStore
---
.../simona/model/participant/load/markov/MarkovParamStore.scala | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index c60d82093c..95d7c48709 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -27,7 +27,7 @@ object MarkovParamStore extends LazyLogging {
private def getDefaultReader: Reader = {
logger.info(
- //
+ "test"
)
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/probabilities/usage_probabilities/usage_probabilities.csv")
From a6320a58fdc92bf8544c89430652b074940b5737 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 15 Apr 2024 13:32:59 +0200
Subject: [PATCH 16/36] Usage Probabilities in MarkovParamStore finished
---
.../load/markov/MarkovParamStore.scala | 19 +++++++++++++++++--
1 file changed, 17 insertions(+), 2 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 95d7c48709..cc038fb1fe 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -7,7 +7,18 @@ import org.apache.commons.csv.CSVFormat
import scala.jdk.CollectionConverters._
object MarkovParamStore extends LazyLogging {
- def loadDefaultApplianceProbabilities(): Map[String, Double] = {
+
+ // Usage Probabilities
+ def main(args: Array[String]): Unit = {
+
+ val probabilitiesMap = Usage_Probabilities()
+ println("Test Funktion: Geladene Gerätewahrscheinlichkeiten:")
+ probabilitiesMap.foreach { case (appliance, probability) =>
+ println(s"$appliance -> $probability")
+ }
+ }
+
+ def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -27,10 +38,14 @@ object MarkovParamStore extends LazyLogging {
private def getDefaultReader: Reader = {
logger.info(
- "test"
+ "Markov Usage_Probabilities parameters file 'usage_probability.csv' from jar."
)
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/probabilities/usage_probabilities/usage_probabilities.csv")
)
}
+
+ // Switch On Probabilities
+
+
}
From 8b346d795f2988521aa1f14c1719ef275ddeb128 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 15 Apr 2024 14:50:03 +0200
Subject: [PATCH 17/36] average_hh in MarkovParamStore finished
---
.../load/markov/appliances/average_hh.csv | 2 +-
.../load/markov/MarkovParamStore.scala | 42 ++++++++++++++++++-
.../load/markov/SwitchOnProbabilityKey.scala | 6 +--
3 files changed, 44 insertions(+), 6 deletions(-)
diff --git a/src/main/resources/load/markov/appliances/average_hh.csv b/src/main/resources/load/markov/appliances/average_hh.csv
index 48a215957c..5cb72d9a40 100644
--- a/src/main/resources/load/markov/appliances/average_hh.csv
+++ b/src/main/resources/load/markov/appliances/average_hh.csv
@@ -1,2 +1,2 @@
washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-0,972;0,394;0,686;0,984;1,219;0,561;1,58;0,9;1,649;2,963;2,5;0,3;1
\ No newline at end of file
+0.972;0.394;0.686;0.984;1.219;0.561;1.58;0.9;1.649;2.963;2.5;0.3;1
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index cc038fb1fe..70d48e701e 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,14 +1,16 @@
package edu.ie3.simona.model.participant.load.markov
+import edu.ie3.simona.model.participant.load.markov.ApplianceCategory
+
import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
+import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey.SwitchOnProbabilityKey
import org.apache.commons.csv.CSVFormat
import scala.jdk.CollectionConverters._
object MarkovParamStore extends LazyLogging {
- // Usage Probabilities
def main(args: Array[String]): Unit = {
val probabilitiesMap = Usage_Probabilities()
@@ -16,8 +18,14 @@ object MarkovParamStore extends LazyLogging {
probabilitiesMap.foreach { case (appliance, probability) =>
println(s"$appliance -> $probability")
}
- }
+ val averageHHMap = Average_HH()
+ println("Test Funktion: Durchschnittliche Haushaltsnutzungszeiten:")
+ averageHHMap.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+ }
+ // Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
val csvParser = CSVFormat.DEFAULT
@@ -47,5 +55,35 @@ object MarkovParamStore extends LazyLogging {
// Switch On Probabilities
+ // Average HH
+ private def Average_HH(): Map[String, Double] = {
+ val reader = getDefaultReaderForAverageHH
+ val csvParser = CSVFormat.DEFAULT
+ .withDelimiter(';')
+ .parse(reader)
+
+ val records = csvParser.getRecords.asScala
+
+ val averageHHMap = records.headOption match {
+ case Some(headerRecord) =>
+ val applianceNames = headerRecord.iterator().asScala.toSeq
+ val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
+ val averageHHValues = valuesRecord.iterator().asScala.map(_.toDouble)
+ applianceNames.zip(averageHHValues).toMap
+ case None =>
+ Map.empty[String, Double]
+ }
+
+ reader.close()
+ averageHHMap
+ }
+
+ private def getDefaultReaderForAverageHH: Reader = {
+ logger.info("Markov Average_HH parameters file 'average_hh.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/average_hh.csv")
+ )
+ }
+
}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
index 7f048b7f9e..0c8374e7db 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
@@ -32,7 +32,7 @@ object SwitchOnProbabilityKey {
SwitchOnProbabilityKey(season, dayType, quarterlyHourOfDay)
}
- def getSeason(dateTime: LocalDateTime): Season = {
+ private def getSeason(dateTime: LocalDateTime): Season = {
val month = dateTime.getMonthValue
if (month >= 3 && month <= 5) Season.Spring
else if (month >= 6 && month <= 8) Season.Summer
@@ -40,13 +40,13 @@ object SwitchOnProbabilityKey {
else Season.Winter
}
- def getDayType(dateTime: LocalDateTime): DayType = {
+ private def getDayType(dateTime: LocalDateTime): DayType = {
val dayOfWeek = dateTime.getDayOfWeek.getValue
if (dayOfWeek >= 1 && dayOfWeek <= 5) DayType.Weekday
else DayType.Weekend
}
- def getQuarterlyHourOfDay(dateTime: LocalDateTime): Int = {
+ private def getQuarterlyHourOfDay(dateTime: LocalDateTime): Int = {
val hour = dateTime.getHour
val minute = dateTime.getMinute
val quarter = minute / 15
From 8f6d8631f851105440f1088e1276959e98bdf355 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Tue, 16 Apr 2024 11:04:31 +0200
Subject: [PATCH 18/36] ByType->Flat/House in MarkovParamStore finished
---
.../load/markov/appliances/by_type.csv | 3 -
.../resources/load/markov/appliances/flat.csv | 2 +
.../load/markov/appliances/house.csv | 2 +
.../load/markov/ApplianceCategory.scala | 1 +
.../load/markov/MarkovParamStore.scala | 74 ++++++++++++++++++-
.../model/participant/PvModelSpec.scala | 2 +-
6 files changed, 78 insertions(+), 6 deletions(-)
delete mode 100644 src/main/resources/load/markov/appliances/by_type.csv
create mode 100644 src/main/resources/load/markov/appliances/flat.csv
create mode 100644 src/main/resources/load/markov/appliances/house.csv
diff --git a/src/main/resources/load/markov/appliances/by_type.csv b/src/main/resources/load/markov/appliances/by_type.csv
deleted file mode 100644
index c4d394b95c..0000000000
--- a/src/main/resources/load/markov/appliances/by_type.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-type;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-flat;0,926;0,269;0,545;0,94;1,088;0,368;1,354;0,807;1,453;2,535;1,5;0,1;1
-house;1,032;0,561;0,873;1,043;1,393;0,817;1,88;1,023;1,91;3,53;2,5;0,3;1,5
\ No newline at end of file
diff --git a/src/main/resources/load/markov/appliances/flat.csv b/src/main/resources/load/markov/appliances/flat.csv
new file mode 100644
index 0000000000..b4b776a7e4
--- /dev/null
+++ b/src/main/resources/load/markov/appliances/flat.csv
@@ -0,0 +1,2 @@
+washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+0.926;0.269;0.545;0.94;1.088;0.368;1.354;0.807;1.453;2.535;1.5;0.1;1
\ No newline at end of file
diff --git a/src/main/resources/load/markov/appliances/house.csv b/src/main/resources/load/markov/appliances/house.csv
new file mode 100644
index 0000000000..e5124251fc
--- /dev/null
+++ b/src/main/resources/load/markov/appliances/house.csv
@@ -0,0 +1,2 @@
+washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+1.032;0.561;0.873;1.043;1.393;0.817;1.88;1.023;1.91;3.53;2.5;0.3;1.5
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
index d5980284f1..15ad1044cc 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
@@ -16,6 +16,7 @@ final case class ApplianceCategory()
case object ApplianceCategory extends ParsableEnumeration {
//val K: Value = Value("k")
+ val TYPE: Value = Value("type")
val DISH_WASHER: Value = Value("dish_washer")
val WASHING_MACHINE: Value = Value("washing_machine")
val DRYER: Value = Value("dryer")
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 70d48e701e..ce653f77ff 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -14,16 +14,29 @@ object MarkovParamStore extends LazyLogging {
def main(args: Array[String]): Unit = {
val probabilitiesMap = Usage_Probabilities()
- println("Test Funktion: Geladene Gerätewahrscheinlichkeiten:")
+ println("Test Function: Usage_Probabilities:")
probabilitiesMap.foreach { case (appliance, probability) =>
println(s"$appliance -> $probability")
}
val averageHHMap = Average_HH()
- println("Test Funktion: Durchschnittliche Haushaltsnutzungszeiten:")
+ println("Test Function: Average:")
averageHHMap.foreach { case (appliance, value) =>
println(s"$appliance -> $value")
}
+
+ val FlatMap = Flat()
+ println("Test Function: Flat:")
+ FlatMap.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+
+ val HouseMap = House()
+ println("Test Function: House:")
+ HouseMap.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+
}
// Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
@@ -85,5 +98,62 @@ object MarkovParamStore extends LazyLogging {
)
}
+ // By Flat // By House
+ private def Flat(): Map[String, Double] = {
+ val reader = getDefaultReaderForFlat
+ val csvParser = CSVFormat.DEFAULT
+ .withDelimiter(';')
+ .parse(reader)
+
+ val records = csvParser.getRecords.asScala
+
+ val FlatMap = records.headOption match {
+ case Some(headerRecord) =>
+ val applianceNames = headerRecord.iterator().asScala.toSeq
+ val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
+ val FlatValues = valuesRecord.iterator().asScala.map(_.toDouble)
+ applianceNames.zip(FlatValues).toMap
+ case None =>
+ Map.empty[String, Double]
+ }
+
+ reader.close()
+ FlatMap
+ }
+
+ private def House(): Map[String, Double] = {
+ val reader = getDefaultReaderForHouse
+ val csvParser = CSVFormat.DEFAULT
+ .withDelimiter(';')
+ .parse(reader)
+
+ val records = csvParser.getRecords.asScala
+
+ val HouseMap = records.headOption match {
+ case Some(headerRecord) =>
+ val applianceNames = headerRecord.iterator().asScala.toSeq
+ val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
+ val HouseValues = valuesRecord.iterator().asScala.map(_.toDouble)
+ applianceNames.zip(HouseValues).toMap
+ case None =>
+ Map.empty[String, Double]
+ }
+
+ reader.close()
+ HouseMap
+ }
+
+ private def getDefaultReaderForFlat: Reader = {
+ logger.info("Markov Flat parameters file 'flat.csv.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/flat.csv")
+ )
+ }
+ private def getDefaultReaderForHouse: Reader = {
+ logger.info("Markov House parameters file 'flat.csv.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/house.csv")
+ )
+ }
}
diff --git a/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
index 660852e28b..6f0a75f687 100644
--- a/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
+++ b/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
@@ -702,7 +702,7 @@ class PvModelSpec extends UnitSpec with GivenWhenThen with DefaultTestData {
"eBeamSSol",
),
(40d, 0d, 0d, -11.6d, -37.5d, 37.0d,
- 67.777778d), // flat surface => eBeamS = eBeamH
+ 67.777778d), // flat.csv surface => eBeamS = eBeamH
(40d, 60d, 0d, -11.6d, -37.5d, 37.0d,
112.84217113154841369d), // 2011-02-20T09:00:00
(40d, 60d, 0d, -11.6d, -78.0d, 75.0d, 210.97937494450755d), // sunrise
From 71a4f00c3040cec6379f9e931ba949c664773868 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Fri, 19 Apr 2024 11:59:51 +0200
Subject: [PATCH 19/36] By Income in MarkovParamStore
---
.../load/markov/ApplianceCategory.scala | 1 -
.../load/markov/MarkovParamStore.scala | 51 ++++++++++++++++++-
2 files changed, 50 insertions(+), 2 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
index 15ad1044cc..d5980284f1 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
@@ -16,7 +16,6 @@ final case class ApplianceCategory()
case object ApplianceCategory extends ParsableEnumeration {
//val K: Value = Value("k")
- val TYPE: Value = Value("type")
val DISH_WASHER: Value = Value("dish_washer")
val WASHING_MACHINE: Value = Value("washing_machine")
val DRYER: Value = Value("dryer")
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index ce653f77ff..4a72bcb729 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -2,7 +2,7 @@ package edu.ie3.simona.model.participant.load.markov
import edu.ie3.simona.model.participant.load.markov.ApplianceCategory
-import java.io.{InputStreamReader, Reader}
+import java.io.{File, FileReader, InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey.SwitchOnProbabilityKey
import org.apache.commons.csv.CSVFormat
@@ -37,6 +37,15 @@ object MarkovParamStore extends LazyLogging {
println(s"$appliance -> $value")
}
+ val incomeData = Income()
+ incomeData.foreach { case (incomeRange, appliancesData) =>
+ println(s"Income Range: $incomeRange")
+ appliancesData.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+ }
+
+
}
// Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
@@ -156,4 +165,44 @@ object MarkovParamStore extends LazyLogging {
)
}
+ // By Income
+
+ def Income(): Map[String, Map[String, Double]] = {
+ val IncomeMap = collection.mutable.Map.empty[String, Map[String, Double]]
+
+ val reader = getDefaultReaderIncome
+ val records = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+
+ val headers = records.getHeaderNames.asScala.toList
+
+ if (headers.nonEmpty) {
+ val incomeHeader = headers.head
+ val applianceHeaders = headers.tail
+
+ records.forEach { record =>
+ val incomeRange = record.get(incomeHeader)
+ val appliancesData = applianceHeaders.map { header =>
+ val appliance = header
+ val value = record.get(header).replace(',', '.').toDouble
+ (appliance, value)
+ }.toMap
+ IncomeMap.put(incomeRange, appliancesData)
+ }
+ }
+
+
+ reader.close()
+ IncomeMap.toMap
+ }
+
+ def getDefaultReaderIncome: Reader = {
+ logger.info("Markov Income parameters file 'by_income.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/by_income.csv")
+ )
+ }
+
+
}
+
+
From 98ad4a47ceca13cf2d9e0ffec762e64e10d7df80 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 22 Apr 2024 16:10:41 +0200
Subject: [PATCH 20/36] By Income in MarkovParamStore finished
---
.../load/markov/appliances/by_income.csv | 16 ++---
.../load/markov/MarkovParamStore.scala | 58 +++++++++----------
2 files changed, 36 insertions(+), 38 deletions(-)
diff --git a/src/main/resources/load/markov/appliances/by_income.csv b/src/main/resources/load/markov/appliances/by_income.csv
index 1a45788189..bdc7682029 100644
--- a/src/main/resources/load/markov/appliances/by_income.csv
+++ b/src/main/resources/load/markov/appliances/by_income.csv
@@ -1,9 +1,9 @@
income;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-below 900;0,835;0,154;0,306;0,885;1,024;0,286;1,05;0,559;0,953;1,807;1;0,1;1,3
-from 900 to 1300;0,924;0,219;0,462;0,926;1,059;0,388;1,232;0,637;1,038;2,093;1,2;0,1;1,4
-from 1300 to 1500;0,946;0,269;0,555;0,944;1,099;0,456;1,349;0,721;1,166;2,302;1,8;0,1;1,5
-from 1500 to 2000;0,964;0,33;0,645;0,963;1,14;0,515;1,486;0,83;1,352;2,574;2;0,2;1,6
-from 2000 to 2600;0,996;0,444;0,77;0,998;1,238;0,635;1,665;0,949;1,656;3,082;2,3;0,2;1,8
-from 2600 to 3600;1,02;0,53;0,875;1,03;1,317;0,691;1,871;1,105;2,095;3,644;2,8;0,3;2
-from 3600 to 5000;1,041;0,616;0,954;1,068;1,447;0,751;2,03;1,221;2,499;4,177;3;0,3;2,3
-from 5000 to 18000;1,075;0,694;1,009;1,099;1,59;0,82;2,15;1,335;3,04;4,708;3,2;0,3;2,8
\ No newline at end of file
+below 900;0.835;0.154;0.306;0.885;1.024;0.286;1.05;0.559;0.953;1.807;1;0.1;1.3
+from 900 to 1300;0.924;0.219;0.462;0.926;1.059;0.388;1.232;0.637;1.038;2.093;1.2;0.1;1.4
+from 1300 to 1500;0.946;0.269;0.555;0.944;1.099;0.456;1.349;0.721;1.166;2.302;1.8;0.1;1.5
+from 1500 to 2000;0.964;0.33;0.645;0.963;1.14;0.515;1.486;0.83;1.352;2.574;2;0.2;1.6
+from 2000 to 2600;0.996;0.444;0.77;0.998;1.238;0.635;1.665;0.949;1.656;3.082;2.3;0.2;1.8
+from 2600 to 3600;1.02;0.53;0.875;1.03;1.317;0.691;1.871;1.105;2.095;3.644;2.8;0.3;2
+from 3600 to 5000;1.041;0.616;0.954;1.068;1.447;0.751;2.03;1.221;2.499;4.177;3;0.3;2.3
+from 5000 to 18000;1.075;0.694;1.009;1.099;1.59;0.82;2.15;1.335;3.04;4.708;3.2;0.3;2.8
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 4a72bcb729..53d6040cf5 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -6,7 +6,9 @@ import java.io.{File, FileReader, InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey.SwitchOnProbabilityKey
import org.apache.commons.csv.CSVFormat
+import scala.collection.mutable.{Map => MutableMap}
+import scala.collection.convert.ImplicitConversions.{`iterable AsScalaIterable`, `map AsJavaMap`}
import scala.jdk.CollectionConverters._
object MarkovParamStore extends LazyLogging {
@@ -37,13 +39,14 @@ object MarkovParamStore extends LazyLogging {
println(s"$appliance -> $value")
}
- val incomeData = Income()
- incomeData.foreach { case (incomeRange, appliancesData) =>
- println(s"Income Range: $incomeRange")
- appliancesData.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
- }
+ val incomeMap = income()
+ println("Test Function: Income:")
+ incomeMap.foreach { case (incomeCategory, appliancesMap) =>
+ println(s"Income Category: $incomeCategory")
+ appliancesMap.foreach { case (appliance, probability) =>
+ println(s" $appliance -> $probability")
}
+ }
}
@@ -159,43 +162,38 @@ object MarkovParamStore extends LazyLogging {
)
}
private def getDefaultReaderForHouse: Reader = {
- logger.info("Markov House parameters file 'flat.csv.csv' from jar.")
+ logger.info("Markov House parameters file 'house.csv.csv' from jar.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/house.csv")
)
}
// By Income
+ def income(): MutableMap[String, Map[String, Double]] = {
+ val reader = getDefaultReaderIncome
+ val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val records = csvParser.getRecords.asScala.toSeq
- def Income(): Map[String, Map[String, Double]] = {
- val IncomeMap = collection.mutable.Map.empty[String, Map[String, Double]]
+ val incomeMap = MutableMap[String, Map[String, Double]]()
- val reader = getDefaultReaderIncome
- val records = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
-
- val headers = records.getHeaderNames.asScala.toList
-
- if (headers.nonEmpty) {
- val incomeHeader = headers.head
- val applianceHeaders = headers.tail
-
- records.forEach { record =>
- val incomeRange = record.get(incomeHeader)
- val appliancesData = applianceHeaders.map { header =>
- val appliance = header
- val value = record.get(header).replace(',', '.').toDouble
- (appliance, value)
- }.toMap
- IncomeMap.put(incomeRange, appliancesData)
- }
- }
+ records.foreach { record =>
+ val incomeCategory = record.get(0)
+ val appliancesMap = MutableMap[String, Double]()
+ for (i <- 1 until record.size()) {
+ val appliance = csvParser.getHeaderNames.get(i)
+ val value = record.get(i).toDouble
+ appliancesMap += (appliance -> value)
+ }
+ incomeMap += (incomeCategory -> appliancesMap.toMap)
+ }
reader.close()
- IncomeMap.toMap
+ incomeMap
+
}
- def getDefaultReaderIncome: Reader = {
+ private def getDefaultReaderIncome: Reader = {
logger.info("Markov Income parameters file 'by_income.csv' from jar.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/by_income.csv")
From 478cd7fa3b86ce829dae7040bb888a72b62b067d Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 22 Apr 2024 17:00:55 +0200
Subject: [PATCH 21/36] By Inhabitants in MarkovParamStore finished
---
.../load/markov/appliances/by_inhabitants.csv | 10 ++--
.../load/markov/MarkovParamStore.scala | 51 ++++++++++++++++---
2 files changed, 50 insertions(+), 11 deletions(-)
diff --git a/src/main/resources/load/markov/appliances/by_inhabitants.csv b/src/main/resources/load/markov/appliances/by_inhabitants.csv
index 684033d37c..14c637bae7 100644
--- a/src/main/resources/load/markov/appliances/by_inhabitants.csv
+++ b/src/main/resources/load/markov/appliances/by_inhabitants.csv
@@ -1,6 +1,6 @@
inhabitants;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-1;0,894;0,223;0,459;0,927;1,055;0,346;1,166;0,645;1,021;1,935;1;0,097;1
-2;1,007;0,431;0,772;1,004;1,282;0,661;1,703;0,923;1,656;3,096;2;0,153;1,5
-3;1,032;0,556;0,894;1,036;1,356;0,711;2,034;1,218;2,451;4,063;2,333;0,208;2
-4;1,05;0,661;0,961;1,052;1,416;0,796;2,099;1,322;2,743;4,601;2,833;0,25;2,5
-5;1,098;0,732;0,988;1,079;1,494;0,904;2,155;1,362;3,133;5,312;3;0,292;3,5
\ No newline at end of file
+1;0.894;0.223;0.459;0.927;1.055;0.346;1.166;0.645;1.021;1.935;1;0.097;1
+2;1.007;0.431;0.772;1.004;1.282;0.661;1.703;0.923;1.656;3.096;2;0.153;1.5
+3;1.032;0.556;0.894;1.036;1.356;0.711;2.034;1.218;2.451;4.063;2.333;0.208;2
+4;1.05;0.661;0.961;1.052;1.416;0.796;2.099;1.322;2.743;4.601;2.833;0.25;2.5
+5;1.098;0.732;0.988;1.079;1.494;0.904;2.155;1.362;3.133;5.312;3;0.292;3.5
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 53d6040cf5..bdc1e554cd 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,14 +1,12 @@
package edu.ie3.simona.model.participant.load.markov
-import edu.ie3.simona.model.participant.load.markov.ApplianceCategory
-import java.io.{File, FileReader, InputStreamReader, Reader}
+import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
-import edu.ie3.simona.model.participant.load.markov.SwitchOnProbabilityKey.SwitchOnProbabilityKey
+
import org.apache.commons.csv.CSVFormat
import scala.collection.mutable.{Map => MutableMap}
-import scala.collection.convert.ImplicitConversions.{`iterable AsScalaIterable`, `map AsJavaMap`}
import scala.jdk.CollectionConverters._
object MarkovParamStore extends LazyLogging {
@@ -48,10 +46,19 @@ object MarkovParamStore extends LazyLogging {
}
}
+ val inhabitantsMap = inhabitants()
+ println("Test Function: Inhabitants:")
+ inhabitantsMap.foreach { case (inhabitantsCategory, appliancesMap) =>
+ println(s"inhabitants Category: $inhabitantsCategory")
+ appliancesMap.foreach { case (appliance, probability) =>
+ println(s" $appliance -> $probability")
+ }
+ }
+
}
// Usage Probabilities
- def Usage_Probabilities(): Map[String, Double] = {
+ private def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -169,7 +176,7 @@ object MarkovParamStore extends LazyLogging {
}
// By Income
- def income(): MutableMap[String, Map[String, Double]] = {
+ private def income(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderIncome
val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
val records = csvParser.getRecords.asScala.toSeq
@@ -200,6 +207,38 @@ object MarkovParamStore extends LazyLogging {
)
}
+ // By Inhabitants
+
+ private def inhabitants(): MutableMap[String, Map[String, Double]] = {
+ val reader = getDefaultReaderInhabitants
+ val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val records = csvParser.getRecords.asScala.toSeq
+
+ val inhabitantsMap = MutableMap[String, Map[String, Double]]()
+
+ records.foreach { record =>
+ val inhabitantCategory = record.get(0)
+ val appliancesMap = MutableMap[String, Double]()
+
+ for (i <- 1 until record.size()) {
+ val appliance = csvParser.getHeaderNames.get(i)
+ val value = record.get(i).toDouble
+ appliancesMap += (appliance -> value)
+ }
+
+ inhabitantsMap += (inhabitantCategory -> appliancesMap.toMap)
+ }
+ reader.close()
+ inhabitantsMap
+ }
+
+ private def getDefaultReaderInhabitants: Reader = {
+ logger.info("Markov Inhabitants parameters file 'by_inhabitants.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/by_inhabitants.csv")
+ )
+ }
+
}
From 59a594b8a57fe4941023d0db37ce1718fce086a8 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 22 Apr 2024 19:08:26 +0200
Subject: [PATCH 22/36] load ts in MarkovParamStore
---
.../load/markov/appliances/load_ts.csv | 53 ++++++++++---------
.../load/markov/MarkovParamStore.scala | 45 +++++++++++++++-
2 files changed, 71 insertions(+), 27 deletions(-)
diff --git a/src/main/resources/load/markov/appliances/load_ts.csv b/src/main/resources/load/markov/appliances/load_ts.csv
index 7c92a5fc05..3014e6db04 100644
--- a/src/main/resources/load/markov/appliances/load_ts.csv
+++ b/src/main/resources/load/markov/appliances/load_ts.csv
@@ -1,25 +1,28 @@
-washing_machine,dish_washer,dryer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
-100,80,2000,700,125,130,150,30,130,40,60,18000,55
-2000,2000,2000,700,1,1,150,30,130,40,60,,
-900,80,2000,700,1,1,150,30,130,40,60,,
-100,80,1600,700,125,130,150,30,130,40,60,,
-100,80,1300,,1,1,150,30,130,40,,,
-300,2000,940,,1,1,150,30,130,40,,,
-50,300,,,125,130,,30,130,40,,,
-,150,,,1,1,,30,130,40,,,
-,,,,1,1,,,,40,,,
-,,,,125,130,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,125,130,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,125,130,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,125,130,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,125,130,,,,40,,,
-,,,,1,1,,,,40,,,
-,,,,1,1,,,,40,,,
+washing_machine;dish_washer;dryer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+100;80;2000;700;125;130;150;30;130;40;60;18000;55
+2000;2000;2000;700;1;1;150;30;130;40;60;;
+900;80;2000;700;1;1;150;30;130;40;60;;
+100;80;1600;700;125;130;150;30;130;40;60;;
+100;80;1300;;1;1;150;30;130;40;;;
+300;2000;940;;1;1;150;30;130;40;;;
+50;300;;;125;130;;30;130;40;;;
+;150;;;1;1;;30;130;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;125;130;;;;40;;;
+;;;;1;1;;;;40;;;
+;;;;1;1;;;;40;;;
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index bdc1e554cd..a86beee62b 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -3,12 +3,11 @@ package edu.ie3.simona.model.participant.load.markov
import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
-
import org.apache.commons.csv.CSVFormat
import scala.collection.mutable.{Map => MutableMap}
-
import scala.jdk.CollectionConverters._
+
object MarkovParamStore extends LazyLogging {
def main(args: Array[String]): Unit = {
@@ -55,6 +54,17 @@ object MarkovParamStore extends LazyLogging {
}
}
+ val loadTSList = LoadTS()
+ loadTSList.zipWithIndex.foreach { case (loadTSMap, index) =>
+ println(s"Load TS Map $index:")
+ loadTSMap.foreach { case (appliance, hourlyLoadMap) =>
+ println(s" Appliance: $appliance")
+ hourlyLoadMap.foreach { case (hour, load) =>
+ println(s" Hour $hour: $load")
+ }
+ }
+ }
+
}
// Usage Probabilities
@@ -239,6 +249,37 @@ object MarkovParamStore extends LazyLogging {
)
}
+ // Load TS
+
+ private def LoadTS(): List[Map[String, Map[Int, Double]]] = {
+ val reader = getDefaultReaderLoad_TS
+ val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val records = csvParser.getRecords.asScala.toList
+
+ val loadTSList = records.map { record =>
+ val loadTSMap = MutableMap[String, Map[Int, Double]]()
+
+ csvParser.getHeaderNames.asScala.foreach { appliance =>
+ val hourlyLoadMap = MutableMap[Int, Double]()
+
+ loadTSMap += (appliance -> hourlyLoadMap.toMap)
+ }
+
+
+ loadTSMap.toMap
+ }
+
+ reader.close()
+ loadTSList
+ }
+
+ private def getDefaultReaderLoad_TS: Reader = {
+ logger.info("Markov Load_TS parameters file 'by_inhabitants.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/appliances/load_ts.csv")
+ )
+ }
+
}
From 45c908ddd0343ad855349cd7f50d03a4a58f53e1 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 24 Apr 2024 20:13:54 +0200
Subject: [PATCH 23/36] MarkovParamStoreSpec
---
.../load/markov/appliances/load_ts.csv | 48 +++++++++----------
.../load/markov/MarkovParamStore.scala | 37 +++++---------
.../load/markov/MarkovParamStoreSpec.scala | 47 ++++++++++++++++++
3 files changed, 81 insertions(+), 51 deletions(-)
create mode 100644 src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
diff --git a/src/main/resources/load/markov/appliances/load_ts.csv b/src/main/resources/load/markov/appliances/load_ts.csv
index 3014e6db04..0b13e07f1d 100644
--- a/src/main/resources/load/markov/appliances/load_ts.csv
+++ b/src/main/resources/load/markov/appliances/load_ts.csv
@@ -1,28 +1,24 @@
washing_machine;dish_washer;dryer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
100;80;2000;700;125;130;150;30;130;40;60;18000;55
-2000;2000;2000;700;1;1;150;30;130;40;60;;
-900;80;2000;700;1;1;150;30;130;40;60;;
-100;80;1600;700;125;130;150;30;130;40;60;;
-100;80;1300;;1;1;150;30;130;40;;;
-300;2000;940;;1;1;150;30;130;40;;;
-50;300;;;125;130;;30;130;40;;;
-;150;;;1;1;;30;130;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;125;130;;;;40;;;
-;;;;1;1;;;;40;;;
-;;;;1;1;;;;40;;;
+2000;2000;2000;700;1;1;150;30;130;40;60;0;0
+900;80;2000;700;1;1;150;30;130;40;60;0;0
+100;80;1600;700;125;130;150;30;130;40;60;0;0
+100;80;1300;0;1;1;150;30;130;40;0;0;0
+300;2000;940;0;1;1;150;30;130;40;0;0;0
+50;300;0;0;125;130;0;30;130;40;0;0;0
+0;150;0;0;1;1;0;30;130;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;125;130;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;125;130;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;125;130;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;125;130;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
+0;0;0;0;125;130;0;0;0;40;0;0;0
+0;0;0;0;1;1;0;0;0;40;0;0;0
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index a86beee62b..21246db1ac 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -4,9 +4,13 @@ package edu.ie3.simona.model.participant.load.markov
import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.csv.CSVFormat
+
import scala.collection.mutable.{Map => MutableMap}
import scala.jdk.CollectionConverters._
+final case class MarkovParamStore() {
+
+}
object MarkovParamStore extends LazyLogging {
@@ -52,23 +56,11 @@ object MarkovParamStore extends LazyLogging {
appliancesMap.foreach { case (appliance, probability) =>
println(s" $appliance -> $probability")
}
- }
-
- val loadTSList = LoadTS()
- loadTSList.zipWithIndex.foreach { case (loadTSMap, index) =>
- println(s"Load TS Map $index:")
- loadTSMap.foreach { case (appliance, hourlyLoadMap) =>
- println(s" Appliance: $appliance")
- hourlyLoadMap.foreach { case (hour, load) =>
- println(s" Hour $hour: $load")
- }
- }
- }
-
+ }
}
// Usage Probabilities
- private def Usage_Probabilities(): Map[String, Double] = {
+ def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -98,7 +90,7 @@ object MarkovParamStore extends LazyLogging {
// Switch On Probabilities
// Average HH
- private def Average_HH(): Map[String, Double] = {
+ def Average_HH(): Map[String, Double] = {
val reader = getDefaultReaderForAverageHH
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -128,7 +120,7 @@ object MarkovParamStore extends LazyLogging {
}
// By Flat // By House
- private def Flat(): Map[String, Double] = {
+ def Flat(): Map[String, Double] = {
val reader = getDefaultReaderForFlat
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -150,7 +142,7 @@ object MarkovParamStore extends LazyLogging {
FlatMap
}
- private def House(): Map[String, Double] = {
+ def House(): Map[String, Double] = {
val reader = getDefaultReaderForHouse
val csvParser = CSVFormat.DEFAULT
.withDelimiter(';')
@@ -186,7 +178,7 @@ object MarkovParamStore extends LazyLogging {
}
// By Income
- private def income(): MutableMap[String, Map[String, Double]] = {
+ def income(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderIncome
val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
val records = csvParser.getRecords.asScala.toSeq
@@ -219,7 +211,7 @@ object MarkovParamStore extends LazyLogging {
// By Inhabitants
- private def inhabitants(): MutableMap[String, Map[String, Double]] = {
+ def inhabitants(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderInhabitants
val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
val records = csvParser.getRecords.asScala.toSeq
@@ -251,7 +243,7 @@ object MarkovParamStore extends LazyLogging {
// Load TS
- private def LoadTS(): List[Map[String, Map[Int, Double]]] = {
+ def LoadTS(): List[Map[String, Map[Int, Double]]] = {
val reader = getDefaultReaderLoad_TS
val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
val records = csvParser.getRecords.asScala.toList
@@ -259,11 +251,6 @@ object MarkovParamStore extends LazyLogging {
val loadTSList = records.map { record =>
val loadTSMap = MutableMap[String, Map[Int, Double]]()
- csvParser.getHeaderNames.asScala.foreach { appliance =>
- val hourlyLoadMap = MutableMap[Int, Double]()
-
- loadTSMap += (appliance -> hourlyLoadMap.toMap)
- }
loadTSMap.toMap
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
new file mode 100644
index 0000000000..ec5388d2ec
--- /dev/null
+++ b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
@@ -0,0 +1,47 @@
+package edu.ie3.simona.model.participant.load.markov
+
+import org.scalatest.matchers.should.Matchers
+import org.scalatest.wordspec.AnyWordSpec
+
+class MarkovParamStoreSpec extends AnyWordSpec with Matchers {
+
+ "MarkovParamStore" should {
+ "print Flat map" in {
+ val flatMap = MarkovParamStore.Flat()
+ println("Test Function: Flat:")
+ flatMap.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+ }
+
+ "print House map" in {
+ val houseMap = MarkovParamStore.House()
+ println("Test Function: House:")
+ houseMap.foreach { case (appliance, value) =>
+ println(s"$appliance -> $value")
+ }
+ }
+
+ "print Income map" in {
+ val incomeMap = MarkovParamStore.income()
+ println("Test Function: Income:")
+ incomeMap.foreach { case (incomeCategory, appliancesMap) =>
+ println(s"Income Category: $incomeCategory")
+ appliancesMap.foreach { case (appliance, probability) =>
+ println(s" $appliance -> $probability")
+ }
+ }
+ }
+
+ "print Inhabitants map" in {
+ val inhabitantsMap = MarkovParamStore.inhabitants()
+ println("Test Function: Inhabitants:")
+ inhabitantsMap.foreach { case (inhabitantsCategory, appliancesMap) =>
+ println(s"Inhabitants Category: $inhabitantsCategory")
+ appliancesMap.foreach { case (appliance, probability) =>
+ println(s" $appliance -> $probability")
+ }
+ }
+ }
+ }
+}
From 1932f8a9cd6121e4dec6058168de7642210bc6b7 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Tue, 30 Apr 2024 10:32:24 +0200
Subject: [PATCH 24/36] Load_TS in MarkovParamStore finished
---
.../load/markov/MarkovParamStore.scala | 48 ++++++++++++-------
1 file changed, 30 insertions(+), 18 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 21246db1ac..0b4ffb16a2 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,10 +1,11 @@
package edu.ie3.simona.model.participant.load.markov
-import java.io.{InputStreamReader, Reader}
+import java.io.{File, InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.csv.CSVFormat
+import scala.collection.mutable
import scala.collection.mutable.{Map => MutableMap}
import scala.jdk.CollectionConverters._
@@ -58,6 +59,16 @@ object MarkovParamStore extends LazyLogging {
}
}
+ def printLoadTSData(loadTSData: mutable.Map[String, Seq[Int]]): Unit = {
+ println("Gerätedaten:")
+ loadTSData.foreach { case (appliance, values) =>
+ println(s"$appliance: ${values.mkString(", ")}")
+ }
+ }
+
+ val loadTSData = Load_TS()
+ printLoadTSData(loadTSData)
+
}
// Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
@@ -241,33 +252,34 @@ object MarkovParamStore extends LazyLogging {
)
}
- // Load TS
-
- def LoadTS(): List[Map[String, Map[Int, Double]]] = {
- val reader = getDefaultReaderLoad_TS
+ //Load_TS
+ def Load_TS(): mutable.Map[String, Seq[Int]] = {
+ val reader = getDefaultReaderLoadTS
val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
- val records = csvParser.getRecords.asScala.toList
-
- val loadTSList = records.map { record =>
- val loadTSMap = MutableMap[String, Map[Int, Double]]()
-
-
-
- loadTSMap.toMap
+ val records = csvParser.getRecords.asScala.toSeq
+ val header = csvParser.getHeaderNames.asScala.toSeq
+ val load_TS = mutable.Map[String, Seq[Int]]()
+
+ for (record <- records) {
+ for (i <- header.indices) {
+ val applianceCategory = header(i)
+ val value = record.get(i).toInt
+ val existingValues = load_TS.getOrElse(applianceCategory, Seq())
+ load_TS.put(applianceCategory, existingValues :+ value)
+ }
}
-
reader.close()
- loadTSList
+ load_TS
}
-
- private def getDefaultReaderLoad_TS: Reader = {
- logger.info("Markov Load_TS parameters file 'by_inhabitants.csv' from jar.")
+ def getDefaultReaderLoadTS: Reader = {
+ logger.info("Markov Income parameters file 'load_ts.csv' from jar.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/load_ts.csv")
)
}
+
}
From 444f29756dbb4d4898b89fbc9af8ff193bc7c052 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 6 May 2024 11:07:36 +0200
Subject: [PATCH 25/36] Add by_Type.csv appliance parameters
---
src/main/resources/load/markov/appliances/by_Type.csv | 3 +++
1 file changed, 3 insertions(+)
create mode 100644 src/main/resources/load/markov/appliances/by_Type.csv
diff --git a/src/main/resources/load/markov/appliances/by_Type.csv b/src/main/resources/load/markov/appliances/by_Type.csv
new file mode 100644
index 0000000000..f6def28116
--- /dev/null
+++ b/src/main/resources/load/markov/appliances/by_Type.csv
@@ -0,0 +1,3 @@
+type;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
+flat;0,926;0,269;0,545;0,94;1,088;0,368;1,354;0,807;1,453;2,535;1,5;0,1;1
+house;1,032;0,561;0,873;1,043;1,393;0,817;1,88;1,023;1,91;3,53;2,5;0,3;1,5
\ No newline at end of file
From a86f6e2008523c37436fb4df752b6215a0b6b5fd Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Mon, 6 May 2024 13:42:52 +0200
Subject: [PATCH 26/36] by_Type in MarkovParamStore finished
---
.../load/markov/appliances/by_Type.csv | 4 +-
.../resources/load/markov/appliances/flat.csv | 2 -
.../load/markov/appliances/house.csv | 2 -
.../usage_probabilities.csv | 1 +
.../load/markov/MarkovParamStore.scala | 118 +++++++++---------
src/test/scala/MarkovParamStoreSpec.scala | 14 +++
.../model/participant/PvModelSpec.scala | 2 +-
7 files changed, 80 insertions(+), 63 deletions(-)
delete mode 100644 src/main/resources/load/markov/appliances/flat.csv
delete mode 100644 src/main/resources/load/markov/appliances/house.csv
create mode 100644 src/test/scala/MarkovParamStoreSpec.scala
diff --git a/src/main/resources/load/markov/appliances/by_Type.csv b/src/main/resources/load/markov/appliances/by_Type.csv
index f6def28116..8f32f16efd 100644
--- a/src/main/resources/load/markov/appliances/by_Type.csv
+++ b/src/main/resources/load/markov/appliances/by_Type.csv
@@ -1,3 +1,3 @@
type;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-flat;0,926;0,269;0,545;0,94;1,088;0,368;1,354;0,807;1,453;2,535;1,5;0,1;1
-house;1,032;0,561;0,873;1,043;1,393;0,817;1,88;1,023;1,91;3,53;2,5;0,3;1,5
\ No newline at end of file
+flat;0.926;0.269;0.545;0.94;1.088;0.368;1.354;0.807;1.453;2.535;1.5;0.1;1
+house;1.032;0.561;0.873;1.043;1.393;0.817;1.88;1.023;1.91;3.53;2.5;0.3;1.5
diff --git a/src/main/resources/load/markov/appliances/flat.csv b/src/main/resources/load/markov/appliances/flat.csv
deleted file mode 100644
index b4b776a7e4..0000000000
--- a/src/main/resources/load/markov/appliances/flat.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-0.926;0.269;0.545;0.94;1.088;0.368;1.354;0.807;1.453;2.535;1.5;0.1;1
\ No newline at end of file
diff --git a/src/main/resources/load/markov/appliances/house.csv b/src/main/resources/load/markov/appliances/house.csv
deleted file mode 100644
index e5124251fc..0000000000
--- a/src/main/resources/load/markov/appliances/house.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-1.032;0.561;0.873;1.043;1.393;0.817;1.88;1.023;1.91;3.53;2.5;0.3;1.5
\ No newline at end of file
diff --git a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
index 10734cd81d..2b3f6a547d 100644
--- a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
+++ b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
@@ -12,3 +12,4 @@ dryer;0.44
stove;1
water_heating;1
other_load;1
+
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 0b4ffb16a2..be59e71e52 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -29,17 +29,15 @@ object MarkovParamStore extends LazyLogging {
println(s"$appliance -> $value")
}
- val FlatMap = Flat()
- println("Test Function: Flat:")
- FlatMap.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
+ val typeMap = Type()
+ println("Test Function: Type:")
+ typeMap.foreach { case (typeCategory, appliancesMap) =>
+ println(s"type Category: $typeCategory")
+ appliancesMap.foreach { case (appliance, probability) =>
+ println(s" $appliance -> $probability")
+ }
}
- val HouseMap = House()
- println("Test Function: House:")
- HouseMap.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
- }
val incomeMap = income()
println("Test Function: Income:")
@@ -57,7 +55,7 @@ object MarkovParamStore extends LazyLogging {
appliancesMap.foreach { case (appliance, probability) =>
println(s" $appliance -> $probability")
}
- }
+ }
def printLoadTSData(loadTSData: mutable.Map[String, Seq[Int]]): Unit = {
println("Gerätedaten:")
@@ -69,6 +67,13 @@ object MarkovParamStore extends LazyLogging {
val loadTSData = Load_TS()
printLoadTSData(loadTSData)
+ val dishWasher = SOP(getDefaultReaderForSOP("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv"))
+ println("Test Function: dish_washer")
+ dishWasher.foreach { case (appliance, values) =>
+ println(s"$appliance: ${values.mkString(", ")}")
+ }
+
+
}
// Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
@@ -100,6 +105,32 @@ object MarkovParamStore extends LazyLogging {
// Switch On Probabilities
+ def SOP(reader: Reader): mutable.Map[String, Seq[Double]] = {
+    val csvParser = CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(reader) // first record supplies the column names
+ val records = csvParser.getRecords.asScala.toSeq
+ val header = csvParser.getHeaderNames.asScala.toSeq
+ val switchonprob = mutable.Map[String, Seq[Double]]()
+
+ for (record <- records) {
+ for (i <- header.indices) {
+ val applianceCategory = header(i)
+ val value = record.get(i).toDouble
+ val existingValues = switchonprob.getOrElse(applianceCategory, Seq())
+ switchonprob.put(applianceCategory, existingValues :+ value)
+ }
+ }
+ reader.close()
+ switchonprob
+ }
+
+ def getDefaultReaderForSOP(filePath: String): Reader = {
+ logger.info("Markov Average_HH parameters folder 'Switch on Probabilities' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream(filePath)
+ )
+ }
+
+
// Average HH
def Average_HH(): Map[String, Double] = {
val reader = getDefaultReaderForAverageHH
@@ -130,64 +161,39 @@ object MarkovParamStore extends LazyLogging {
)
}
- // By Flat // By House
- def Flat(): Map[String, Double] = {
- val reader = getDefaultReaderForFlat
- val csvParser = CSVFormat.DEFAULT
- .withDelimiter(';')
- .parse(reader)
-
- val records = csvParser.getRecords.asScala
-
- val FlatMap = records.headOption match {
- case Some(headerRecord) =>
- val applianceNames = headerRecord.iterator().asScala.toSeq
- val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
- val FlatValues = valuesRecord.iterator().asScala.map(_.toDouble)
- applianceNames.zip(FlatValues).toMap
- case None =>
- Map.empty[String, Double]
- }
+ // By_Type
+ def Type(): MutableMap[String, Map[String, Double]] = {
+ val reader = getDefaultReaderType
+ val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val records = csvParser.getRecords.asScala.toSeq
- reader.close()
- FlatMap
- }
+ val TypeMap = MutableMap[String, Map[String, Double]]()
- def House(): Map[String, Double] = {
- val reader = getDefaultReaderForHouse
- val csvParser = CSVFormat.DEFAULT
- .withDelimiter(';')
- .parse(reader)
+ records.foreach { record =>
+ val TypeCategory = record.get(0)
+ val appliancesMap = MutableMap[String, Double]()
- val records = csvParser.getRecords.asScala
+ for (i <- 1 until record.size()) {
+ val appliance = csvParser.getHeaderNames.get(i)
+ val value = record.get(i).toDouble
+ appliancesMap += (appliance -> value)
+ }
- val HouseMap = records.headOption match {
- case Some(headerRecord) =>
- val applianceNames = headerRecord.iterator().asScala.toSeq
- val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
- val HouseValues = valuesRecord.iterator().asScala.map(_.toDouble)
- applianceNames.zip(HouseValues).toMap
- case None =>
- Map.empty[String, Double]
+ TypeMap += (TypeCategory -> appliancesMap.toMap)
}
-
reader.close()
- HouseMap
- }
+ TypeMap
- private def getDefaultReaderForFlat: Reader = {
- logger.info("Markov Flat parameters file 'flat.csv.csv' from jar.")
- new InputStreamReader(
- getClass.getResourceAsStream("/load/markov/appliances/flat.csv")
- )
}
- private def getDefaultReaderForHouse: Reader = {
- logger.info("Markov House parameters file 'house.csv.csv' from jar.")
+
+ private def getDefaultReaderType: Reader = {
+ logger.info("Markov Income parameters file 'by_Type.csv' from jar.")
new InputStreamReader(
- getClass.getResourceAsStream("/load/markov/appliances/house.csv")
+ getClass.getResourceAsStream("/load/markov/appliances/by_Type.csv")
)
}
+
// By Income
def income(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderIncome
diff --git a/src/test/scala/MarkovParamStoreSpec.scala b/src/test/scala/MarkovParamStoreSpec.scala
new file mode 100644
index 0000000000..bea34e0625
--- /dev/null
+++ b/src/test/scala/MarkovParamStoreSpec.scala
@@ -0,0 +1,14 @@
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{SOP, Usage_Probabilities, getDefaultReaderForSOP}
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+
+class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
+
+ "A Map" should "have the correct size" in {
+ val map = Usage_Probabilities()
+ map.size should be(2)
+ }
+
+
+}
diff --git a/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
index 6f0a75f687..a90001276c 100644
--- a/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
+++ b/src/test/scala/edu/ie3/simona/model/participant/PvModelSpec.scala
@@ -702,7 +702,7 @@ class PvModelSpec extends UnitSpec with GivenWhenThen with DefaultTestData {
"eBeamSSol",
),
(40d, 0d, 0d, -11.6d, -37.5d, 37.0d,
- 67.777778d), // flat.csv surface => eBeamS = eBeamH
+      67.777778d), // flat surface => eBeamS = eBeamH
(40d, 60d, 0d, -11.6d, -37.5d, 37.0d,
112.84217113154841369d), // 2011-02-20T09:00:00
(40d, 60d, 0d, -11.6d, -78.0d, 75.0d, 210.97937494450755d), // sunrise
From e66bf048b80b2d74691bb3a5877af754f2a3b303 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Tue, 7 May 2024 16:38:57 +0200
Subject: [PATCH 27/36] dish_washer.csv
---
.../switch_on_probabilities/dish_washer.csv | 50 +++++++++----------
1 file changed, 25 insertions(+), 25 deletions(-)
diff --git a/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
index 4a2490944b..9fa0caef30 100644
--- a/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
+++ b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
@@ -1,25 +1,25 @@
-summer_weekday;summer_saturday;summer_sunday;winter_weekday;winter_saturday;winter_sunday
-0.001;0;0.014;0.005;0.011;0
-0.002;0;0;0.003;0.022;0.005
-0;0;0;0;0.022;0.018
-0;0;0;0;0;0.009
-0;0;0;0;0;0.004
-0;0;0;0;0;0.004
-0.04;0;0.014;0.003;0.022;0.015
-0.036;0.005;0.005;0.022;0.022;0.005
-0.04;0.015;0.03;0.054;0.033;0.005
-0.02;0.053;0.041;0.049;0.056;0.036
-0.068;0.102;0.082;0.033;0.089;0.091
-0.032;0.097;0.068;0.054;0.089;0.091
-0.04;0.049;0.055;0.049;0.078;0.109
-0.08;0.097;0.082;0.049;0.078;0.054
-0.12;0.146;0.11;0.082;0.112;0.154
-0.08;0.087;0.055;0.109;0.096;0.091
-0.06;0.049;0.055;0.054;0.045;0.073
-0.04;0.049;0.082;0.027;0.067;0.054
-0.06;0.039;0.088;0.049;0.033;0.045
-0.12;0.049;0.041;0.082;0.022;0.036
-0.1;0.063;0.049;0.131;0.033;0.045
-0.04;0.058;0.068;0.082;0.033;0.027
-0.02;0.034;0.041;0.049;0.022;0.018
-0.004;0.01;0.019;0.014;0.011;0.009
+summer_weekday,summer_saturday,summer_sunday,winter_weekday,winter_saturday,winter_sunday
+0.001,0,0.014,0.005,0.011,0
+0.002,0,0,0.003,0.022,0.005
+0,0,0,0,0.022,0.018
+0,0,0,0,0,0.009
+0,0,0,0,0,0.004
+0,0,0,0,0,0.004
+0.04,0,0.014,0.003,0.022,0.015
+0.036,0.005,0.005,0.022,0.022,0.005
+0.04,0.015,0.03,0.054,0.033,0.005
+0.02,0.053,0.041,0.049,0.056,0.036
+0.068,0.102,0.082,0.033,0.089,0.091
+0.032,0.097,0.068,0.054,0.089,0.091
+0.04,0.049,0.055,0.049,0.078,0.109
+0.08,0.097,0.082,0.049,0.078,0.054
+0.12,0.146,0.11,0.082,0.112,0.154
+0.08,0.087,0.055,0.109,0.096,0.091
+0.06,0.049,0.055,0.054,0.045,0.073
+0.04,0.049,0.082,0.027,0.067,0.054
+0.06,0.039,0.088,0.049,0.033,0.045
+0.12,0.049,0.041,0.082,0.022,0.036
+0.1,0.063,0.049,0.131,0.033,0.045
+0.04,0.058,0.068,0.082,0.033,0.027
+0.02,0.034,0.041,0.049,0.022,0.018
+0.004,0.01,0.019,0.014,0.011,0.009
From 0b19d3c36515a84c26d3157cc2647f70f12dfc75 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 8 May 2024 13:33:08 +0200
Subject: [PATCH 28/36] Save: convert parameter CSVs to comma delimiter and adapt MarkovParamStore
---
.../load/markov/appliances/average_hh.csv | 4 +-
.../load/markov/appliances/by_Type.csv | 7 +-
.../load/markov/appliances/by_income.csv | 19 +--
.../load/markov/appliances/by_inhabitants.csv | 13 +-
.../load/markov/appliances/load_ts.csv | 51 +++----
.../switch_on_probabilities/dish_washer.csv | 50 +++----
.../usage_probabilities.csv | 28 ++--
.../load/markov/MarkovParamStore.scala | 133 +++++++++---------
8 files changed, 153 insertions(+), 152 deletions(-)
diff --git a/src/main/resources/load/markov/appliances/average_hh.csv b/src/main/resources/load/markov/appliances/average_hh.csv
index 5cb72d9a40..f19dbc330d 100644
--- a/src/main/resources/load/markov/appliances/average_hh.csv
+++ b/src/main/resources/load/markov/appliances/average_hh.csv
@@ -1,2 +1,2 @@
-washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-0.972;0.394;0.686;0.984;1.219;0.561;1.58;0.9;1.649;2.963;2.5;0.3;1
\ No newline at end of file
+washing_machine,dryer,dish_washer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+0.972,0.394,0.686,0.984,1.219,0.561,1.58,0.9,1.649,2.963,2.5,0.3,1
\ No newline at end of file
diff --git a/src/main/resources/load/markov/appliances/by_Type.csv b/src/main/resources/load/markov/appliances/by_Type.csv
index 8f32f16efd..5115154073 100644
--- a/src/main/resources/load/markov/appliances/by_Type.csv
+++ b/src/main/resources/load/markov/appliances/by_Type.csv
@@ -1,3 +1,4 @@
-type;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-flat;0.926;0.269;0.545;0.94;1.088;0.368;1.354;0.807;1.453;2.535;1.5;0.1;1
-house;1.032;0.561;0.873;1.043;1.393;0.817;1.88;1.023;1.91;3.53;2.5;0.3;1.5
+type,washing_machine,dryer,dish_washer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+flat,0.926,0.269,0.545,0.94,1.088,0.368,1.354,0.807,1.453,2.535,1.5,0.1,1
+house,1.032,0.561,0.873,1.043,1.393,0.817,1.88,1.023,1.91,3.53,2.5,0.3,1.5
+
diff --git a/src/main/resources/load/markov/appliances/by_income.csv b/src/main/resources/load/markov/appliances/by_income.csv
index bdc7682029..7e06dca7a2 100644
--- a/src/main/resources/load/markov/appliances/by_income.csv
+++ b/src/main/resources/load/markov/appliances/by_income.csv
@@ -1,9 +1,10 @@
-income;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-below 900;0.835;0.154;0.306;0.885;1.024;0.286;1.05;0.559;0.953;1.807;1;0.1;1.3
-from 900 to 1300;0.924;0.219;0.462;0.926;1.059;0.388;1.232;0.637;1.038;2.093;1.2;0.1;1.4
-from 1300 to 1500;0.946;0.269;0.555;0.944;1.099;0.456;1.349;0.721;1.166;2.302;1.8;0.1;1.5
-from 1500 to 2000;0.964;0.33;0.645;0.963;1.14;0.515;1.486;0.83;1.352;2.574;2;0.2;1.6
-from 2000 to 2600;0.996;0.444;0.77;0.998;1.238;0.635;1.665;0.949;1.656;3.082;2.3;0.2;1.8
-from 2600 to 3600;1.02;0.53;0.875;1.03;1.317;0.691;1.871;1.105;2.095;3.644;2.8;0.3;2
-from 3600 to 5000;1.041;0.616;0.954;1.068;1.447;0.751;2.03;1.221;2.499;4.177;3;0.3;2.3
-from 5000 to 18000;1.075;0.694;1.009;1.099;1.59;0.82;2.15;1.335;3.04;4.708;3.2;0.3;2.8
+income,washing_machine,dryer,dish_washer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+below 900,0.835,0.154,0.306,0.885,1.024,0.286,1.05,0.559,0.953,1.807,1,0.1,1.3
+from 900 to 1300,0.924,0.219,0.462,0.926,1.059,0.388,1.232,0.637,1.038,2.093,1.2,0.1,1.4
+from 1300 to 1500,0.946,0.269,0.555,0.944,1.099,0.456,1.349,0.721,1.166,2.302,1.8,0.1,1.5
+from 1500 to 2000,0.964,0.33,0.645,0.963,1.14,0.515,1.486,0.83,1.352,2.574,2,0.2,1.6
+from 2000 to 2600,0.996,0.444,0.77,0.998,1.238,0.635,1.665,0.949,1.656,3.082,2.3,0.2,1.8
+from 2600 to 3600,1.02,0.53,0.875,1.03,1.317,0.691,1.871,1.105,2.095,3.644,2.8,0.3,2
+from 3600 to 5000,1.041,0.616,0.954,1.068,1.447,0.751,2.03,1.221,2.499,4.177,3,0.3,2.3
+from 5000 to 18000,1.075,0.694,1.009,1.099,1.59,0.82,2.15,1.335,3.04,4.708,3.2,0.3,2.8
+
diff --git a/src/main/resources/load/markov/appliances/by_inhabitants.csv b/src/main/resources/load/markov/appliances/by_inhabitants.csv
index 14c637bae7..8bf1eb9c76 100644
--- a/src/main/resources/load/markov/appliances/by_inhabitants.csv
+++ b/src/main/resources/load/markov/appliances/by_inhabitants.csv
@@ -1,6 +1,7 @@
-inhabitants;washing_machine;dryer;dish_washer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-1;0.894;0.223;0.459;0.927;1.055;0.346;1.166;0.645;1.021;1.935;1;0.097;1
-2;1.007;0.431;0.772;1.004;1.282;0.661;1.703;0.923;1.656;3.096;2;0.153;1.5
-3;1.032;0.556;0.894;1.036;1.356;0.711;2.034;1.218;2.451;4.063;2.333;0.208;2
-4;1.05;0.661;0.961;1.052;1.416;0.796;2.099;1.322;2.743;4.601;2.833;0.25;2.5
-5;1.098;0.732;0.988;1.079;1.494;0.904;2.155;1.362;3.133;5.312;3;0.292;3.5
+inhabitants,washing_machine,dryer,dish_washer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+1,0.894,0.223,0.459,0.927,1.055,0.346,1.166,0.645,1.021,1.935,1,0.097,1
+2,1.007,0.431,0.772,1.004,1.282,0.661,1.703,0.923,1.656,3.096,2,0.153,1.5
+3,1.032,0.556,0.894,1.036,1.356,0.711,2.034,1.218,2.451,4.063,2.333,0.208,2
+4,1.05,0.661,0.961,1.052,1.416,0.796,2.099,1.322,2.743,4.601,2.833,0.25,2.5
+5,1.098,0.732,0.988,1.079,1.494,0.904,2.155,1.362,3.133,5.312,3,0.292,3.5
+
diff --git a/src/main/resources/load/markov/appliances/load_ts.csv b/src/main/resources/load/markov/appliances/load_ts.csv
index 0b13e07f1d..812746be11 100644
--- a/src/main/resources/load/markov/appliances/load_ts.csv
+++ b/src/main/resources/load/markov/appliances/load_ts.csv
@@ -1,24 +1,27 @@
-washing_machine;dish_washer;dryer;stove;fridge;freezer;television;video_recorder;pc;telecommunication;lighting;water_heating;other_load
-100;80;2000;700;125;130;150;30;130;40;60;18000;55
-2000;2000;2000;700;1;1;150;30;130;40;60;0;0
-900;80;2000;700;1;1;150;30;130;40;60;0;0
-100;80;1600;700;125;130;150;30;130;40;60;0;0
-100;80;1300;0;1;1;150;30;130;40;0;0;0
-300;2000;940;0;1;1;150;30;130;40;0;0;0
-50;300;0;0;125;130;0;30;130;40;0;0;0
-0;150;0;0;1;1;0;30;130;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;125;130;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;125;130;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;125;130;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;125;130;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
-0;0;0;0;125;130;0;0;0;40;0;0;0
-0;0;0;0;1;1;0;0;0;40;0;0;0
+washing_machine,dish_washer,dryer,stove,fridge,freezer,television,video_recorder,pc,telecommunication,lighting,water_heating,other_load
+100,80,2000,700,125,130,150,30,130,40,60,18000,55
+2000,2000,2000,700,1,1,150,30,130,40,60,0,0
+900,80,2000,700,1,1,150,30,130,40,60,0,0
+100,80,1600,700,125,130,150,30,130,40,60,0,0
+100,80,1300,0,1,1,150,30,130,40,0,0,0
+300,2000,940,0,1,1,150,30,130,40,0,0,0
+50,300,0,0,125,130,0,30,130,40,0,0,0
+0,150,0,0,1,1,0,30,130,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
+0,0,0,0,125,130,0,0,0,40,0,0,0
+0,0,0,0,1,1,0,0,0,40,0,0,0
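For a sense of scale of load_ts.csv: if each row is read as one 15-minute step of an appliance's switch-on cycle in watts (an assumption, the patch states neither the resolution nor the unit), the washing_machine column sums to a plausible per-cycle energy. A worked sketch under exactly those assumptions:

object LoadTsEnergySketch {
  def main(args: Array[String]): Unit = {
    // Non-zero washing_machine values from load_ts.csv above, assumed to be watts.
    val washingMachineWatts = Seq(100, 2000, 900, 100, 100, 300, 50)
    // Assuming 15-minute steps: energy = sum(P) * 0.25 h, converted to kWh.
    val energyKWh = washingMachineWatts.sum * 0.25 / 1000.0
    println(f"washing_machine cycle energy under these assumptions: $energyKWh%.2f kWh") // ~0.89
  }
}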
diff --git a/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
index 9fa0caef30..df55e2fb1c 100644
--- a/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
+++ b/src/main/resources/load/markov/probabilities/switch_on_probabilities/dish_washer.csv
@@ -1,25 +1,25 @@
-summer_weekday,summer_saturday,summer_sunday,winter_weekday,winter_saturday,winter_sunday
-0.001,0,0.014,0.005,0.011,0
-0.002,0,0,0.003,0.022,0.005
-0,0,0,0,0.022,0.018
-0,0,0,0,0,0.009
-0,0,0,0,0,0.004
-0,0,0,0,0,0.004
-0.04,0,0.014,0.003,0.022,0.015
-0.036,0.005,0.005,0.022,0.022,0.005
-0.04,0.015,0.03,0.054,0.033,0.005
-0.02,0.053,0.041,0.049,0.056,0.036
-0.068,0.102,0.082,0.033,0.089,0.091
-0.032,0.097,0.068,0.054,0.089,0.091
-0.04,0.049,0.055,0.049,0.078,0.109
-0.08,0.097,0.082,0.049,0.078,0.054
-0.12,0.146,0.11,0.082,0.112,0.154
-0.08,0.087,0.055,0.109,0.096,0.091
-0.06,0.049,0.055,0.054,0.045,0.073
-0.04,0.049,0.082,0.027,0.067,0.054
-0.06,0.039,0.088,0.049,0.033,0.045
-0.12,0.049,0.041,0.082,0.022,0.036
-0.1,0.063,0.049,0.131,0.033,0.045
-0.04,0.058,0.068,0.082,0.033,0.027
-0.02,0.034,0.041,0.049,0.022,0.018
-0.004,0.01,0.019,0.014,0.011,0.009
+summer_weekday;summer_saturday;summer_sunday;winter_weekday;winter_saturday;winter_sunday
+0.001;0;0.014;0.005;0.011;0
+0.002;0;0;0.003;0.022;0.005
+0;0;0;0;0.022;0.018
+0;0;0;0;0;0.009
+0;0;0;0;0;0.004
+0;0;0;0;0;0.004
+0.04;0;0.014;0.003;0.022;0.015
+0.036;0.005;0.005;0.022;0.022;0.005
+0.04;0.015;0.03;0.054;0.033;0.005
+0.02;0.053;0.041;0.049;0.056;0.036
+0.068;0.102;0.082;0.033;0.089;0.091
+0.032;0.097;0.068;0.054;0.089;0.091
+0.04;0.049;0.055;0.049;0.078;0.109
+0.08;0.097;0.082;0.049;0.078;0.054
+0.12;0.146;0.11;0.082;0.112;0.154
+0.08;0.087;0.055;0.109;0.096;0.091
+0.06;0.049;0.055;0.054;0.045;0.073
+0.04;0.049;0.082;0.027;0.067;0.054
+0.06;0.039;0.088;0.049;0.033;0.045
+0.12;0.049;0.041;0.082;0.022;0.036
+0.1;0.063;0.049;0.131;0.033;0.045
+0.04;0.058;0.068;0.082;0.033;0.027
+0.02;0.034;0.041;0.049;0.022;0.018
+0.004;0.01;0.019;0.014;0.011;0.009
\ No newline at end of file
diff --git a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
index 2b3f6a547d..4798473d2f 100644
--- a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
+++ b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
@@ -1,15 +1,15 @@
-appliance_category;usage_probability
-washing_machine;0.6
-lighting;5
-dish_washer;0.77
-video_recorder;0.15
-telecommunication;1
-pc;1
-fridge;1
-television;2.33
-freezer;1
-dryer;0.44
-stove;1
-water_heating;1
-other_load;1
+appliance_category,usage_probability
+washing_machine,0.6
+lighting,5
+dish_washer,0.77
+video_recorder,0.15
+telecommunication,1
+pc,1
+fridge,1
+television,2.33
+freezer,1
+dryer,0.44
+stove,1
+water_heating,1
+other_load,1
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index be59e71e52..2940729354 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,13 +1,14 @@
package edu.ie3.simona.model.participant.load.markov
-import java.io.{File, InputStreamReader, Reader}
+import java.io.{File, FileReader, InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
-import org.apache.commons.csv.CSVFormat
+import org.apache.commons.csv.{CSVFormat, CSVParser}
import scala.collection.mutable
import scala.collection.mutable.{Map => MutableMap}
import scala.jdk.CollectionConverters._
+import scala.io.Source
final case class MarkovParamStore() {
@@ -67,33 +68,38 @@ object MarkovParamStore extends LazyLogging {
val loadTSData = Load_TS()
printLoadTSData(loadTSData)
- val dishWasher = SOP(getDefaultReaderForSOP("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv"))
- println("Test Function: dish_washer")
- dishWasher.foreach { case (appliance, values) =>
- println(s"$appliance: ${values.mkString(", ")}")
+
+
+ def printSOP(SOPData: mutable.Map[String, Seq[Double]]): Unit = {
+      println("Appliance data:")
+ SOPData.foreach { case (appliance, values) =>
+ println(s"$appliance: ${values.mkString(", ")}")
+ }
}
+ val SOPData = SOP(getDefaultReaderSOP("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv"))
+ printSOP(SOPData)
+
+
}
// Usage Probabilities
def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
- val csvParser = CSVFormat.DEFAULT
- .withDelimiter(';')
- .withFirstRecordAsHeader()
- .parse(reader)
-
- val records = csvParser.getRecords.asScala
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
+ val records = csvParser.iterator().asScala.drop(1)
val probabilitiesMap = records.map { record =>
val applianceCategory = record.get("appliance_category")
val usageProbability = record.get("usage_probability").toDouble
(applianceCategory, usageProbability)
}.toMap
-
reader.close()
probabilitiesMap
}
+
+
private def getDefaultReader: Reader = {
logger.info(
"Markov Usage_Probabilities parameters file 'usage_probability.csv' from jar."
@@ -105,41 +111,41 @@ object MarkovParamStore extends LazyLogging {
// Switch On Probabilities
- def SOP(reader: Reader): mutable.Map[String, Seq[Double]] = {
- val csvParser = CSVFormat.DEFAULT.parse(reader)
+ def SOP(filePath: String): mutable.Map[String, Seq[Double]] = {
+ val reader = getDefaultReaderSOP(filePath)
+ val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
val records = csvParser.getRecords.asScala.toSeq
val header = csvParser.getHeaderNames.asScala.toSeq
- val switchonprob = mutable.Map[String, Seq[Double]]()
+ val sop = mutable.Map[String, Seq[Double]]()
for (record <- records) {
for (i <- header.indices) {
val applianceCategory = header(i)
val value = record.get(i).toDouble
- val existingValues = switchonprob.getOrElse(applianceCategory, Seq())
- switchonprob.put(applianceCategory, existingValues :+ value)
+ val existingValues = sop.getOrElse(applianceCategory, Seq())
+ sop.put(applianceCategory, existingValues :+ value)
}
}
reader.close()
- switchonprob
+ sop
}
- def getDefaultReaderForSOP(filePath: String): Reader = {
- logger.info("Markov Average_HH parameters folder 'Switch on Probabilities' from jar.")
- new InputStreamReader(
- getClass.getResourceAsStream(filePath)
- )
+ import java.io.{File, FileReader, Reader}
+
+ def getDefaultReaderSOP(filePath: String): Reader = {
+ logger.info(s"Markov Income parameters file '$filePath' from jar.")
+ val file = new File(getClass.getResource(filePath).toURI)
+ new FileReader(file)
}
+
+
// Average HH
def Average_HH(): Map[String, Double] = {
val reader = getDefaultReaderForAverageHH
- val csvParser = CSVFormat.DEFAULT
- .withDelimiter(';')
- .parse(reader)
-
+ val csvParser = CSVFormat.DEFAULT.parse(reader)
val records = csvParser.getRecords.asScala
-
val averageHHMap = records.headOption match {
case Some(headerRecord) =>
val applianceNames = headerRecord.iterator().asScala.toSeq
@@ -164,28 +170,23 @@ object MarkovParamStore extends LazyLogging {
// By_Type
def Type(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderType
- val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
val records = csvParser.getRecords.asScala.toSeq
-
- val TypeMap = MutableMap[String, Map[String, Double]]()
-
+ val typeMap = MutableMap[String, Map[String, Double]]()
records.foreach { record =>
- val TypeCategory = record.get(0)
+ val typeCategory = record.get(0)
val appliancesMap = MutableMap[String, Double]()
-
- for (i <- 1 until record.size()) {
- val appliance = csvParser.getHeaderNames.get(i)
- val value = record.get(i).toDouble
+ val header = csvParser.getHeaderNames.asScala.drop(1)
+ header.zipWithIndex.foreach { case (appliance, index) =>
+ val value = record.get(index + 1).toDouble
appliancesMap += (appliance -> value)
}
-
- TypeMap += (TypeCategory -> appliancesMap.toMap)
+ typeMap += (typeCategory -> appliancesMap.toMap)
}
reader.close()
- TypeMap
-
+ typeMap
}
-
private def getDefaultReaderType: Reader = {
logger.info("Markov Income parameters file 'by_Type.csv' from jar.")
new InputStreamReader(
@@ -197,29 +198,25 @@ object MarkovParamStore extends LazyLogging {
// By Income
def income(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderIncome
- val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
val records = csvParser.getRecords.asScala.toSeq
-
val incomeMap = MutableMap[String, Map[String, Double]]()
-
records.foreach { record =>
val incomeCategory = record.get(0)
val appliancesMap = MutableMap[String, Double]()
-
- for (i <- 1 until record.size()) {
- val appliance = csvParser.getHeaderNames.get(i)
- val value = record.get(i).toDouble
+ val header = csvParser.getHeaderNames.asScala.drop(1)
+ header.zipWithIndex.foreach { case (appliance, index) =>
+ val value = record.get(index + 1).toDouble
appliancesMap += (appliance -> value)
}
-
incomeMap += (incomeCategory -> appliancesMap.toMap)
}
reader.close()
incomeMap
-
}
- private def getDefaultReaderIncome: Reader = {
+ private def getDefaultReaderIncome: Reader = {
logger.info("Markov Income parameters file 'by_income.csv' from jar.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/by_income.csv")
@@ -230,21 +227,18 @@ object MarkovParamStore extends LazyLogging {
def inhabitants(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderInhabitants
- val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
val records = csvParser.getRecords.asScala.toSeq
-
val inhabitantsMap = MutableMap[String, Map[String, Double]]()
-
records.foreach { record =>
val inhabitantCategory = record.get(0)
val appliancesMap = MutableMap[String, Double]()
-
- for (i <- 1 until record.size()) {
- val appliance = csvParser.getHeaderNames.get(i)
- val value = record.get(i).toDouble
+ val header = csvParser.getHeaderNames.asScala.drop(1)
+ header.zipWithIndex.foreach { case (appliance, index) =>
+ val value = record.get(index + 1).toDouble
appliancesMap += (appliance -> value)
}
-
inhabitantsMap += (inhabitantCategory -> appliancesMap.toMap)
}
reader.close()
@@ -252,7 +246,7 @@ object MarkovParamStore extends LazyLogging {
}
private def getDefaultReaderInhabitants: Reader = {
- logger.info("Markov Inhabitants parameters file 'by_inhabitants.csv' from jar.")
+ println("Reading by_inhabitants.csv file.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/by_inhabitants.csv")
)
@@ -261,23 +255,24 @@ object MarkovParamStore extends LazyLogging {
//Load_TS
def Load_TS(): mutable.Map[String, Seq[Int]] = {
val reader = getDefaultReaderLoadTS
- val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
val records = csvParser.getRecords.asScala.toSeq
val header = csvParser.getHeaderNames.asScala.toSeq
- val load_TS = mutable.Map[String, Seq[Int]]()
-
+ val loadTS = mutable.Map[String, Seq[Int]]()
for (record <- records) {
for (i <- header.indices) {
val applianceCategory = header(i)
val value = record.get(i).toInt
- val existingValues = load_TS.getOrElse(applianceCategory, Seq())
- load_TS.put(applianceCategory, existingValues :+ value)
+ val existingValues = loadTS.getOrElse(applianceCategory, Seq())
+ loadTS.put(applianceCategory, existingValues :+ value)
}
}
reader.close()
- load_TS
+ loadTS
}
- def getDefaultReaderLoadTS: Reader = {
+
+ def getDefaultReaderLoadTS: Reader = {
logger.info("Markov Income parameters file 'load_ts.csv' from jar.")
new InputStreamReader(
getClass.getResourceAsStream("/load/markov/appliances/load_ts.csv")
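A note on the reader change in this patch: the new getDefaultReaderSOP resolves the resource via getClass.getResource(filePath).toURI and a FileReader, which works from a directory on the classpath but throws once the CSVs are packaged into a jar (the URI is then a jar: URI rather than a file: URI). A jar-safe sketch (object and method names are illustrative, not part of the patch); the next patch in the series goes back to an InputStreamReader for this file, presumably for that reason:

import java.io.{InputStreamReader, Reader}
import java.nio.charset.StandardCharsets

object ResourceReaderSketch {
  // Streams a classpath resource; works both from a directory on the classpath
  // and from inside a packaged jar, unlike new File(getResource(...).toURI).
  def resourceReader(resourcePath: String): Reader = {
    val stream = Option(getClass.getResourceAsStream(resourcePath)).getOrElse(
      throw new IllegalArgumentException(s"Resource not found: $resourcePath")
    )
    new InputStreamReader(stream, StandardCharsets.UTF_8)
  }
}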
From 80e3e129b53bec8bc41f2ddbd2711c43cc934818 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Fri, 10 May 2024 12:48:42 +0200
Subject: [PATCH 29/36] MarkovParamStore finished
---
.../usage_probabilities.csv | 3 +-
.../load/markov/MarkovParamStore.scala | 130 ++++++------------
2 files changed, 43 insertions(+), 90 deletions(-)
diff --git a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
index 4798473d2f..1d82ce52e6 100644
--- a/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
+++ b/src/main/resources/load/markov/probabilities/usage_probabilities/usage_probabilities.csv
@@ -11,5 +11,4 @@ freezer,1
dryer,0.44
stove,1
water_heating,1
-other_load,1
-
+other_load,1
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 2940729354..06e2d91c94 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -1,89 +1,44 @@
+/*
+ * © 2020. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
package edu.ie3.simona.model.participant.load.markov
-import java.io.{File, FileReader, InputStreamReader, Reader}
+import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.csv.{CSVFormat, CSVParser}
+
import scala.collection.mutable
import scala.collection.mutable.{Map => MutableMap}
import scala.jdk.CollectionConverters._
-import scala.io.Source
+
+/** Storage for a collection of MarkovAgent parameters.
+ */
final case class MarkovParamStore() {
}
+/** MarkovParamStore reads values from CSV files and returns them as Maps,
+ * where the keys represent different parameters and the values are the corresponding values.
+ */
+
object MarkovParamStore extends LazyLogging {
def main(args: Array[String]): Unit = {
- val probabilitiesMap = Usage_Probabilities()
- println("Test Function: Usage_Probabilities:")
- probabilitiesMap.foreach { case (appliance, probability) =>
- println(s"$appliance -> $probability")
- }
-
- val averageHHMap = Average_HH()
- println("Test Function: Average:")
- averageHHMap.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
- }
-
- val typeMap = Type()
- println("Test Function: Type:")
- typeMap.foreach { case (typeCategory, appliancesMap) =>
- println(s"type Category: $typeCategory")
- appliancesMap.foreach { case (appliance, probability) =>
- println(s" $appliance -> $probability")
- }
- }
-
-
- val incomeMap = income()
- println("Test Function: Income:")
- incomeMap.foreach { case (incomeCategory, appliancesMap) =>
- println(s"Income Category: $incomeCategory")
- appliancesMap.foreach { case (appliance, probability) =>
- println(s" $appliance -> $probability")
- }
- }
-
- val inhabitantsMap = inhabitants()
- println("Test Function: Inhabitants:")
- inhabitantsMap.foreach { case (inhabitantsCategory, appliancesMap) =>
- println(s"inhabitants Category: $inhabitantsCategory")
- appliancesMap.foreach { case (appliance, probability) =>
- println(s" $appliance -> $probability")
- }
- }
-
- def printLoadTSData(loadTSData: mutable.Map[String, Seq[Int]]): Unit = {
- println("Gerätedaten:")
- loadTSData.foreach { case (appliance, values) =>
- println(s"$appliance: ${values.mkString(", ")}")
- }
- }
-
- val loadTSData = Load_TS()
- printLoadTSData(loadTSData)
-
-
-
- def printSOP(SOPData: mutable.Map[String, Seq[Double]]): Unit = {
-      println("Appliance data:")
- SOPData.foreach { case (appliance, values) =>
- println(s"$appliance: ${values.mkString(", ")}")
- }
- }
-
- val SOPData = SOP(getDefaultReaderSOP("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv"))
- printSOP(SOPData)
-
+ }
+ /** This function reads the usage probabilities from a CSV file and returns them as a Map,
+ * where the keys are the appliance categories and the values are the corresponding probabilities.
+ */
- }
// Usage Probabilities
+
def Usage_Probabilities(): Map[String, Double] = {
val reader = getDefaultReader
val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
@@ -98,7 +53,9 @@ object MarkovParamStore extends LazyLogging {
probabilitiesMap
}
-
+ /** @return
+ * A reader pointing to the default Usage_Probabilities parameter location
+ */
private def getDefaultReader: Reader = {
logger.info(
@@ -111,37 +68,34 @@ object MarkovParamStore extends LazyLogging {
// Switch On Probabilities
- def SOP(filePath: String): mutable.Map[String, Seq[Double]] = {
- val reader = getDefaultReaderSOP(filePath)
- val csvParser = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)
+ def sop_Dish_Washer(): mutable.Map[String, Seq[Int]] = {
+ val reader = getDefaultReadersop_Dish_Washer
+ val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
+ val csvParser = new CSVParser(reader, customFormat)
val records = csvParser.getRecords.asScala.toSeq
val header = csvParser.getHeaderNames.asScala.toSeq
- val sop = mutable.Map[String, Seq[Double]]()
-
+ val dish_Washer = mutable.Map[String, Seq[Int]]()
for (record <- records) {
for (i <- header.indices) {
val applianceCategory = header(i)
- val value = record.get(i).toDouble
- val existingValues = sop.getOrElse(applianceCategory, Seq())
- sop.put(applianceCategory, existingValues :+ value)
+ val value = record.get(i).toInt
+ val existingValues = dish_Washer.getOrElse(applianceCategory, Seq())
+ dish_Washer.put(applianceCategory, existingValues :+ value)
}
}
reader.close()
- sop
+ dish_Washer
}
- import java.io.{File, FileReader, Reader}
-
- def getDefaultReaderSOP(filePath: String): Reader = {
- logger.info(s"Markov Income parameters file '$filePath' from jar.")
- val file = new File(getClass.getResource(filePath).toURI)
- new FileReader(file)
+ def getDefaultReadersop_Dish_Washer: Reader = {
+ logger.info("Markov Income parameters file 'dish_washer.csv' from jar.")
+ new InputStreamReader(
+ getClass.getResourceAsStream("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv")
+ )
}
-
-
-
// Average HH
+
def Average_HH(): Map[String, Double] = {
val reader = getDefaultReaderForAverageHH
val csvParser = CSVFormat.DEFAULT.parse(reader)
@@ -168,6 +122,7 @@ object MarkovParamStore extends LazyLogging {
}
// By_Type
+
def Type(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderType
val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
@@ -187,6 +142,7 @@ object MarkovParamStore extends LazyLogging {
reader.close()
typeMap
}
+
private def getDefaultReaderType: Reader = {
logger.info("Markov Income parameters file 'by_Type.csv' from jar.")
new InputStreamReader(
@@ -194,8 +150,8 @@ object MarkovParamStore extends LazyLogging {
)
}
-
// By Income
+
def income(): MutableMap[String, Map[String, Double]] = {
val reader = getDefaultReaderIncome
val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
@@ -253,6 +209,7 @@ object MarkovParamStore extends LazyLogging {
}
//Load_TS
+
def Load_TS(): mutable.Map[String, Seq[Int]] = {
val reader = getDefaultReaderLoadTS
val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
@@ -278,9 +235,6 @@ object MarkovParamStore extends LazyLogging {
getClass.getResourceAsStream("/load/markov/appliances/load_ts.csv")
)
}
-
-
-
}
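With the store finished, the individual loader functions can be called directly. A small usage sketch (not part of the patch; it assumes the CSV resources from this series are on the classpath and only exercises the two loaders that the tests in the next patch also cover):

import edu.ie3.simona.model.participant.load.markov.MarkovParamStore

object MarkovParamStoreDemo {
  def main(args: Array[String]): Unit = {
    // Appliance category -> usage probability, e.g. "dish_washer" -> 0.77
    val usage = MarkovParamStore.Usage_Probabilities()
    // Appliance category -> average number per household, e.g. "fridge" -> 1.219
    val averageHh = MarkovParamStore.Average_HH()

    println(s"dish_washer usage probability: ${usage.getOrElse("dish_washer", 0.0)}")
    println(s"average fridges per household: ${averageHh.getOrElse("fridge", 0.0)}")
  }
}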
From 61903acd48ee4716e3b0389c3f5cd4a0ceffa262 Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Fri, 10 May 2024 12:50:04 +0200
Subject: [PATCH 30/36] Test functions for usage_probabilities and average_hh
---
src/test/scala/MarkovParamStoreSpec.scala | 14 -----
.../load/markov/MarkovParamStoreSpec.scala | 53 +++++--------------
2 files changed, 13 insertions(+), 54 deletions(-)
delete mode 100644 src/test/scala/MarkovParamStoreSpec.scala
diff --git a/src/test/scala/MarkovParamStoreSpec.scala b/src/test/scala/MarkovParamStoreSpec.scala
deleted file mode 100644
index bea34e0625..0000000000
--- a/src/test/scala/MarkovParamStoreSpec.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import edu.ie3.simona.model.participant.load.markov.MarkovParamStore
-import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{SOP, Usage_Probabilities, getDefaultReaderForSOP}
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
-
- "A Map" should "have the correct size" in {
- val map = Usage_Probabilities()
- map.size should be(2)
- }
-
-
-}
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
index ec5388d2ec..fbe4727689 100644
--- a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
+++ b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
@@ -1,47 +1,20 @@
-package edu.ie3.simona.model.participant.load.markov
-
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore
+import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-class MarkovParamStoreSpec extends AnyWordSpec with Matchers {
- "MarkovParamStore" should {
- "print Flat map" in {
- val flatMap = MarkovParamStore.Flat()
- println("Test Function: Flat:")
- flatMap.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
- }
- }
-
- "print House map" in {
- val houseMap = MarkovParamStore.House()
- println("Test Function: House:")
- houseMap.foreach { case (appliance, value) =>
- println(s"$appliance -> $value")
- }
- }
+class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
- "print Income map" in {
- val incomeMap = MarkovParamStore.income()
- println("Test Function: Income:")
- incomeMap.foreach { case (incomeCategory, appliancesMap) =>
- println(s"Income Category: $incomeCategory")
- appliancesMap.foreach { case (appliance, probability) =>
- println(s" $appliance -> $probability")
- }
- }
+  "usage_probabilities" should "return a map of appliance categories and their corresponding probabilities" in {
+ val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
+ probabilitiesMap shouldBe a[Map[_, _]]
+ probabilitiesMap.size shouldEqual 12
+ probabilitiesMap.getOrElse("other_load", 0.0) shouldEqual 1
}
- "print Inhabitants map" in {
- val inhabitantsMap = MarkovParamStore.inhabitants()
- println("Test Function: Inhabitants:")
- inhabitantsMap.foreach { case (inhabitantsCategory, appliancesMap) =>
- println(s"Inhabitants Category: $inhabitantsCategory")
- appliancesMap.foreach { case (appliance, probability) =>
- println(s" $appliance -> $probability")
- }
- }
+ "average_hh" should "return a map of appliances and their corresponding probabilities" in {
+ val average_HHMap = MarkovParamStore.Average_HH()
+ average_HHMap shouldBe a [Map[_, _]]
+ average_HHMap.size shouldEqual 13
+ average_HHMap.getOrElse("lighting", 0.0) shouldEqual 2.5
}
}
-}
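In the same AnyFlatSpec style, a further check could pin down value ranges rather than only map sizes. A sketch of such a test (illustrative, not part of the patch; all usage probabilities in the CSV are non-negative, so this is a safe assertion):

import edu.ie3.simona.model.participant.load.markov.MarkovParamStore
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class UsageProbabilitySanitySpec extends AnyFlatSpec with Matchers {
  "Usage_Probabilities" should "contain only non-negative values" in {
    MarkovParamStore.Usage_Probabilities().values.foreach { value =>
      value should be >= 0.0
    }
  }
}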
From bbd52fe3c68aaefec4b32c4a9be72f4edc41a28d Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 15 May 2024 16:28:55 +0200
Subject: [PATCH 31/36] MarkovModel.scala
---
.../participant/load/markov/MarkovModel.scala | 72 +++++++++++++++++--
1 file changed, 67 insertions(+), 5 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index ccdae22d4d..a9ccf34fcf 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -9,13 +9,15 @@ package edu.ie3.simona.model.participant.load.markov
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.control.QControl
-import edu.ie3.simona.model.participant.{CalcRelevantData, SystemParticipant}
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{Average_HH, Load_TS, Type, Usage_Probabilities, income, inhabitants, sop_Dish_Washer}
+import edu.ie3.simona.model.participant.{CalcRelevantData, FlexChangeIndicator, ModelState, SystemParticipant}
+import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
import edu.ie3.util.scala.OperationInterval
-import squants.Power
+import squants.{Dimensionless, Power}
import java.util.UUID
-abstract class MarkovModel(
+final case class MarkovModel(
uuid: UUID,
id: String,
operationInterval: OperationInterval,
@@ -33,6 +35,66 @@ abstract class MarkovModel(
qControl = ???,
sRated = ???,
cosPhiRated = ???,
- )
+ ) {
+
+
+ /** Calculate the power behaviour based on the given data.
+ *
+ * @param tick
+ * Regarded instant in simulation
+ * @param voltage
+ * Nodal voltage magnitude
+ * @param modelState
+ * Current state of the model
+ * @param data
+ * Further needed, secondary data
+ * @return
+ * A tuple of active and reactive power
+ */
+ override def calculatePower(tick: Long, voltage: Dimensionless, modelState: ModelState.ConstantState.type, data: MarkovRelevantData): ApparentPower = ???
+
+ /** Calculate the active power behaviour of the model
+ *
+ * @param modelState
+ * Current state of the model
+ * @param data
+ * Further needed, secondary data
+ * @return
+ * Active power
+ */
+ override protected def calculateActivePower(modelState: ModelState.ConstantState.type, data: MarkovRelevantData): Power = ???
+
+ /** @param data
+ * The relevant data for calculation
+ * @param lastState
+ * The last reached state
+ * @return
+ * flex options
+ */
+ override def determineFlexOptions(data: MarkovRelevantData, lastState: ModelState.ConstantState.type): FlexibilityMessage.ProvideFlexOptions = ???
+
+ /** @param data
+ * The relevant data for calculation
+ * @param lastState
+ * The last reached state
+ * @param setPower
+ * power that has been set by ???
+ * @return
+ * updated relevant data and an indication at which circumstances flex
+ * options will change next
+ */
+ override def handleControlledPowerChange(data: MarkovRelevantData, lastState: ModelState.ConstantState.type, setPower: Power): (ModelState.ConstantState.type, FlexChangeIndicator) = ???
+}
+
+class MarkovRelevantData extends CalcRelevantData {
+
+ val Usage_Probabilities_Map = Usage_Probabilities()
+ val sop_Dish_Washer_Map = sop_Dish_Washer()
+
+ val average_Hh_Map = Average_HH()
+ val by_Income_Map = income()
+ val by_Inhabitants_Map = inhabitants()
+ val by_Type_Map = Type()
+ val load_Ts_Map = Load_TS()
+}
-class MarkovRelevantData extends CalcRelevantData
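MarkovRelevantData now loads every parameter map eagerly in its constructor, so each instance re-reads all CSV resources. If that ever becomes a hot path, the maps could be shared, for example via a lazily initialised cache. A sketch of that idea (illustrative names, not part of the patch):

import edu.ie3.simona.model.participant.load.markov.MarkovParamStore

object MarkovParamCache {
  // Loaded once on first access and then reused by every MarkovRelevantData-like consumer.
  lazy val usageProbabilities: Map[String, Double] = MarkovParamStore.Usage_Probabilities()
  lazy val averageHousehold: Map[String, Double] = MarkovParamStore.Average_HH()
}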
From 487250f1a9caab58fb36a50b6c9d0af70b69bd7c Mon Sep 17 00:00:00 2001
From: pierrepetersmeier
Date: Wed, 22 May 2024 23:17:31 +0200
Subject: [PATCH 32/36] MarkovParamStoreSpec.scala expanded
---
.../participant/load/markov/MarkovModel.scala | 28 ++++++++++---
.../load/markov/MarkovParamStore.scala | 2 +-
.../load/markov/MarkovParamStoreSpec.scala | 41 ++++++++++++++++---
3 files changed, 59 insertions(+), 12 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index a9ccf34fcf..9318d47182 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -9,10 +9,11 @@ package edu.ie3.simona.model.participant.load.markov
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.control.QControl
-import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{Average_HH, Load_TS, Type, Usage_Probabilities, income, inhabitants, sop_Dish_Washer}
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{Average_HH, Type, Usage_Probabilities, income, inhabitants, load_TS, sop_Dish_Washer}
import edu.ie3.simona.model.participant.{CalcRelevantData, FlexChangeIndicator, ModelState, SystemParticipant}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
import edu.ie3.util.scala.OperationInterval
+import squants.energy.Power
import squants.{Dimensionless, Power}
import java.util.UUID
@@ -51,7 +52,12 @@ final case class MarkovModel(
* @return
* A tuple of active and reactive power
*/
- override def calculatePower(tick: Long, voltage: Dimensionless, modelState: ModelState.ConstantState.type, data: MarkovRelevantData): ApparentPower = ???
+ override def calculatePower(tick: Long, voltage: Dimensionless, modelState: ModelState.ConstantState.type, data: MarkovRelevantData): ApparentPower = {
+ val activePower = calculateActivePower(modelState, data)
+ val reactivePower = activePower * Math.tan(Math.acos(cosPhiRated))
+
+ ApparentPower(activePower, reactivePower)
+ }
/** Calculate the active power behaviour of the model
*
@@ -62,7 +68,13 @@ final case class MarkovModel(
* @return
* Active power
*/
- override protected def calculateActivePower(modelState: ModelState.ConstantState.type, data: MarkovRelevantData): Power = ???
+ override protected def calculateActivePower(modelState: ModelState.ConstantState.type, data: MarkovRelevantData): Power = {
+
+ //Map's
+
+ val activePower = 1 //Test
+ Power(activePower)
+ }
/** @param data
* The relevant data for calculation
@@ -71,7 +83,9 @@ final case class MarkovModel(
* @return
* flex options
*/
- override def determineFlexOptions(data: MarkovRelevantData, lastState: ModelState.ConstantState.type): FlexibilityMessage.ProvideFlexOptions = ???
+ override def determineFlexOptions(data: MarkovRelevantData, lastState: ModelState.ConstantState.type): FlexibilityMessage.ProvideFlexOptions = {
+
+ }
/** @param data
* The relevant data for calculation
@@ -83,7 +97,9 @@ final case class MarkovModel(
* updated relevant data and an indication at which circumstances flex
* options will change next
*/
- override def handleControlledPowerChange(data: MarkovRelevantData, lastState: ModelState.ConstantState.type, setPower: Power): (ModelState.ConstantState.type, FlexChangeIndicator) = ???
+ override def handleControlledPowerChange(data: MarkovRelevantData, lastState: ModelState.ConstantState.type, setPower: Power): (ModelState.ConstantState.type, FlexChangeIndicator) = {
+
+ }
}
class MarkovRelevantData extends CalcRelevantData {
@@ -95,6 +111,6 @@ class MarkovRelevantData extends CalcRelevantData {
val by_Income_Map = income()
val by_Inhabitants_Map = inhabitants()
val by_Type_Map = Type()
- val load_Ts_Map = Load_TS()
+ val load_Ts_Map = load_TS()
}
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 06e2d91c94..027a4e3bc5 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -210,7 +210,7 @@ object MarkovParamStore extends LazyLogging {
//Load_TS
- def Load_TS(): mutable.Map[String, Seq[Int]] = {
+ def load_TS(): mutable.Map[String, Seq[Int]] = {
val reader = getDefaultReaderLoadTS
val customFormat = CSVFormat.DEFAULT.builder().setHeader().build()
val csvParser = new CSVParser(reader, customFormat)
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
index fbe4727689..0240da676b 100644
--- a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
+++ b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
@@ -4,6 +4,12 @@ import org.scalatest.matchers.should.Matchers
class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
+  "dish_washer" should "return a map of Season_Day categories and their corresponding probabilities" in {
+ val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
+ probabilitiesMap shouldBe a[Map[_, _]]
+ probabilitiesMap.size shouldEqual 12
+ }
+
"usage_probabilities" should "return a map of appliance Category's and their corresponding probabilities" in {
val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
probabilitiesMap shouldBe a[Map[_, _]]
@@ -11,10 +17,35 @@ class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
probabilitiesMap.getOrElse("other_load", 0.0) shouldEqual 1
}
- "average_hh" should "return a map of appliances and their corresponding probabilities" in {
- val average_HHMap = MarkovParamStore.Average_HH()
- average_HHMap shouldBe a [Map[_, _]]
- average_HHMap.size shouldEqual 13
- average_HHMap.getOrElse("lighting", 0.0) shouldEqual 2.5
+ "average_hh" should "return a map of appliances and their corresponding probabilities" in {
+ val average_HHMap = MarkovParamStore.Average_HH()
+ average_HHMap shouldBe a [Map[_, _]]
+ average_HHMap.size shouldEqual 13
+ average_HHMap.getOrElse("lighting", 0.0) shouldEqual 2.5
+ }
+
+ "by_type" should "return a map of appliances in a House or Flat and their corresponding probabilities" in {
+ val TypeMap = MarkovParamStore.Type()
+ TypeMap shouldBe a [scala.collection.mutable.Map[_, _]]
+ TypeMap.size shouldEqual 2
}
+
+ "by_income" should "return a map of appliances per income and their corresponding probabilities" in {
+ val incomeMap = MarkovParamStore.income()
+ incomeMap shouldBe a [scala.collection.mutable.Map[_, _]]
+ incomeMap.size shouldEqual 8
+ }
+
+ "by_inhabitants" should "return a map of appliances per inhabitants and their corresponding probabilities" in {
+ val inhabitantsMap = MarkovParamStore.inhabitants()
+ inhabitantsMap shouldBe a [scala.collection.mutable.Map[_, _]]
+ inhabitantsMap.size shouldEqual 5
+ }
+
+ "load_ts" should "return a map of appliances and their corresponding Load Time Series" in {
+ val load_TSMap = MarkovParamStore.load_TS()
+ load_TSMap shouldBe a [scala.collection.mutable.Map[_, _]]
+ load_TSMap.size shouldEqual 13
+ }
+
}
From 294c42735bb154c689a9de90c95a4a355359d2da Mon Sep 17 00:00:00 2001
From: danielfeismann
Date: Thu, 23 May 2024 10:28:06 +0200
Subject: [PATCH 33/36] fix calculatePower
---
.../ie3/simona/model/participant/load/markov/MarkovModel.scala | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index 9318d47182..376af08614 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -13,6 +13,7 @@ import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{Average_HH
import edu.ie3.simona.model.participant.{CalcRelevantData, FlexChangeIndicator, ModelState, SystemParticipant}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
import edu.ie3.util.scala.OperationInterval
+import edu.ie3.util.scala.quantities.ReactivePower
import squants.energy.Power
import squants.{Dimensionless, Power}
@@ -54,7 +55,7 @@ final case class MarkovModel(
*/
override def calculatePower(tick: Long, voltage: Dimensionless, modelState: ModelState.ConstantState.type, data: MarkovRelevantData): ApparentPower = {
val activePower = calculateActivePower(modelState, data)
- val reactivePower = activePower * Math.tan(Math.acos(cosPhiRated))
+ val reactivePower = calculateReactivePower(activePower, voltage)
ApparentPower(activePower, reactivePower)
}
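Background for this one-line fix: with a fixed rated power factor cosPhi, reactive power follows Q = P * tan(acos(cosPhi)), which is exactly what the replaced line hard-coded. Delegating to calculateReactivePower from the participant base class presumably also brings the configured QControl into play. A small worked sketch of the plain relation (values are illustrative; the model currently leaves cosPhiRated as ???):

object PowerFactorSketch {
  def main(args: Array[String]): Unit = {
    val p = 1.0       // active power in kW
    val cosPhi = 0.95 // assumed rated power factor, purely for illustration
    val q = p * math.tan(math.acos(cosPhi))
    println(f"Q = $q%.3f kvar") // ~0.329 kvar for 1 kW at cos phi 0.95
  }
}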
From 35816dbc080969b5f40746534f174dd6c9225356 Mon Sep 17 00:00:00 2001
From: danielfeismann
Date: Thu, 23 May 2024 12:06:22 +0200
Subject: [PATCH 34/36] fmt
---
.../load/markov/ApplianceCategory.scala | 7 +-
.../participant/load/markov/MarkovModel.scala | 122 ++++++++++--------
.../load/markov/MarkovParamStore.scala | 45 ++++---
.../load/markov/SwitchOnProbabilityKey.scala | 6 +-
.../load/random/RandomLoadParamStore.scala | 2 +-
.../load/markov/MarkovParamStoreSpec.scala | 95 +++++++-------
6 files changed, 149 insertions(+), 128 deletions(-)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
index d5980284f1..8d1d9dc53c 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/ApplianceCategory.scala
@@ -10,11 +10,10 @@ import edu.ie3.simona.util.ParsableEnumeration
final case class ApplianceCategory()
-/**
- * Enumeration of all considered appliance types
- */
+/** Enumeration of all considered appliance types
+ */
case object ApplianceCategory extends ParsableEnumeration {
- //val K: Value = Value("k")
+ // val K: Value = Value("k")
val DISH_WASHER: Value = Value("dish_washer")
val WASHING_MACHINE: Value = Value("washing_machine")
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index 376af08614..a54e36d11e 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -9,13 +9,17 @@ package edu.ie3.simona.model.participant.load.markov
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.control.QControl
-import edu.ie3.simona.model.participant.load.markov.MarkovParamStore.{Average_HH, Type, Usage_Probabilities, income, inhabitants, load_TS, sop_Dish_Washer}
-import edu.ie3.simona.model.participant.{CalcRelevantData, FlexChangeIndicator, ModelState, SystemParticipant}
+import edu.ie3.simona.model.participant.load.markov.MarkovParamStore._
+import edu.ie3.simona.model.participant.{
+ CalcRelevantData,
+ FlexChangeIndicator,
+ ModelState,
+ SystemParticipant,
+}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
import edu.ie3.util.scala.OperationInterval
-import edu.ie3.util.scala.quantities.ReactivePower
-import squants.energy.Power
-import squants.{Dimensionless, Power}
+import squants.Dimensionless
+import squants.energy.{Kilowatts, Power}
import java.util.UUID
@@ -39,21 +43,25 @@ final case class MarkovModel(
cosPhiRated = ???,
) {
-
/** Calculate the power behaviour based on the given data.
- *
- * @param tick
- * Regarded instant in simulation
- * @param voltage
- * Nodal voltage magnitude
- * @param modelState
- * Current state of the model
- * @param data
- * Further needed, secondary data
- * @return
- * A tuple of active and reactive power
- */
- override def calculatePower(tick: Long, voltage: Dimensionless, modelState: ModelState.ConstantState.type, data: MarkovRelevantData): ApparentPower = {
+ *
+ * @param tick
+ * Regarded instant in simulation
+ * @param voltage
+ * Nodal voltage magnitude
+ * @param modelState
+ * Current state of the model
+ * @param data
+ * Further needed, secondary data
+ * @return
+ * A tuple of active and reactive power
+ */
+ override def calculatePower(
+ tick: Long,
+ voltage: Dimensionless,
+ modelState: ModelState.ConstantState.type,
+ data: MarkovRelevantData,
+ ): ApparentPower = {
val activePower = calculateActivePower(modelState, data)
val reactivePower = calculateReactivePower(activePower, voltage)
@@ -61,46 +69,51 @@ final case class MarkovModel(
}
/** Calculate the active power behaviour of the model
- *
- * @param modelState
- * Current state of the model
- * @param data
- * Further needed, secondary data
- * @return
- * Active power
- */
- override protected def calculateActivePower(modelState: ModelState.ConstantState.type, data: MarkovRelevantData): Power = {
-
- //Map's
-
- val activePower = 1 //Test
- Power(activePower)
+ *
+ * @param modelState
+ * Current state of the model
+ * @param data
+ * Further needed, secondary data
+ * @return
+ * Active power
+ */
+ override protected def calculateActivePower(
+ modelState: ModelState.ConstantState.type,
+ data: MarkovRelevantData,
+ ): Power = {
+
+ // Map's
+
+ Kilowatts(1) // Test
}
/** @param data
- * The relevant data for calculation
- * @param lastState
- * The last reached state
- * @return
- * flex options
- */
- override def determineFlexOptions(data: MarkovRelevantData, lastState: ModelState.ConstantState.type): FlexibilityMessage.ProvideFlexOptions = {
-
- }
+ * The relevant data for calculation
+ * @param lastState
+ * The last reached state
+ * @return
+ * flex options
+ */
+ override def determineFlexOptions(
+ data: MarkovRelevantData,
+ lastState: ModelState.ConstantState.type,
+ ): FlexibilityMessage.ProvideFlexOptions = ???
/** @param data
- * The relevant data for calculation
- * @param lastState
- * The last reached state
- * @param setPower
- * power that has been set by ???
- * @return
- * updated relevant data and an indication at which circumstances flex
- * options will change next
- */
- override def handleControlledPowerChange(data: MarkovRelevantData, lastState: ModelState.ConstantState.type, setPower: Power): (ModelState.ConstantState.type, FlexChangeIndicator) = {
-
- }
+ * The relevant data for calculation
+ * @param lastState
+ * The last reached state
+ * @param setPower
+ * power that has been set by ???
+ * @return
+ * updated relevant data and an indication at which circumstances flex
+ * options will change next
+ */
+ override def handleControlledPowerChange(
+ data: MarkovRelevantData,
+ lastState: ModelState.ConstantState.type,
+ setPower: Power,
+ ): (ModelState.ConstantState.type, FlexChangeIndicator) = ???
}
class MarkovRelevantData extends CalcRelevantData {
@@ -114,4 +127,3 @@ class MarkovRelevantData extends CalcRelevantData {
val by_Type_Map = Type()
val load_Ts_Map = load_TS()
}
-
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
index 027a4e3bc5..0b550dccf2 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStore.scala
@@ -6,36 +6,32 @@
package edu.ie3.simona.model.participant.load.markov
-
import java.io.{InputStreamReader, Reader}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.csv.{CSVFormat, CSVParser}
-
import scala.collection.mutable
import scala.collection.mutable.{Map => MutableMap}
import scala.jdk.CollectionConverters._
/** Storage for a collection of MarkovAgent parameters.
- */
+ */
-final case class MarkovParamStore() {
-
-}
+final case class MarkovParamStore() {}
-/** MarkovParamStore reads values from CSV files and returns them as Maps,
- * where the keys represent different parameters and the values are the corresponding values.
- */
+/** MarkovParamStore reads values from CSV files and returns them as Maps, where
+ * the keys represent different parameters and the values are the corresponding
+ * values.
+ */
object MarkovParamStore extends LazyLogging {
- def main(args: Array[String]): Unit = {
-
- }
+ def main(args: Array[String]): Unit = {}
- /** This function reads the usage probabilities from a CSV file and returns them as a Map,
- * where the keys are the appliance categories and the values are the corresponding probabilities.
- */
+ /** This function reads the usage probabilities from a CSV file and returns
+ * them as a Map, where the keys are the appliance categories and the values
+ * are the corresponding probabilities.
+ */
// Usage Probabilities
@@ -54,15 +50,17 @@ object MarkovParamStore extends LazyLogging {
}
/** @return
- * A reader pointing to the default Usage_Probabilities parameter location
- */
+ * A reader pointing to the default Usage_Probabilities parameter location
+ */
private def getDefaultReader: Reader = {
logger.info(
"Markov Usage_Probabilities parameters file 'usage_probability.csv' from jar."
)
new InputStreamReader(
- getClass.getResourceAsStream("/load/markov/probabilities/usage_probabilities/usage_probabilities.csv")
+ getClass.getResourceAsStream(
+ "/load/markov/probabilities/usage_probabilities/usage_probabilities.csv"
+ )
)
}
@@ -90,7 +88,9 @@ object MarkovParamStore extends LazyLogging {
def getDefaultReadersop_Dish_Washer: Reader = {
logger.info("Markov Income parameters file 'dish_washer.csv' from jar.")
new InputStreamReader(
- getClass.getResourceAsStream("/load/markov/probabilities/switch_on_probabilities/dish_washer.csv")
+ getClass.getResourceAsStream(
+ "/load/markov/probabilities/switch_on_probabilities/dish_washer.csv"
+ )
)
}
@@ -103,7 +103,8 @@ object MarkovParamStore extends LazyLogging {
val averageHHMap = records.headOption match {
case Some(headerRecord) =>
val applianceNames = headerRecord.iterator().asScala.toSeq
- val valuesRecord = records.drop(1).headOption.getOrElse(csvParser.iterator().next())
+ val valuesRecord =
+ records.drop(1).headOption.getOrElse(csvParser.iterator().next())
val averageHHValues = valuesRecord.iterator().asScala.map(_.toDouble)
applianceNames.zip(averageHHValues).toMap
case None =>
@@ -208,7 +209,7 @@ object MarkovParamStore extends LazyLogging {
)
}
- //Load_TS
+ // Load_TS
def load_TS(): mutable.Map[String, Seq[Int]] = {
val reader = getDefaultReaderLoadTS
@@ -236,5 +237,3 @@ object MarkovParamStore extends LazyLogging {
)
}
}
-
-
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
index 0c8374e7db..735fc817ce 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/SwitchOnProbabilityKey.scala
@@ -23,7 +23,11 @@ object SwitchOnProbabilityKey {
case object Weekend extends DayType
}
- case class SwitchOnProbabilityKey(season: Season, dayType: DayType, quarterlyHourOfDay: Int)
+ case class SwitchOnProbabilityKey(
+ season: Season,
+ dayType: DayType,
+ quarterlyHourOfDay: Int,
+ )
def extractFromDateTime(dateTime: LocalDateTime): SwitchOnProbabilityKey = {
val season = getSeason(dateTime)
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
index 5f20f6b131..36635ae940 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/random/RandomLoadParamStore.scala
@@ -83,7 +83,7 @@ case object RandomLoadParamStore extends LazyLogging {
* @param reader
* a reader that is providing random load parameters from a CSV file
*/
- def initializeDayTypeValues(
+ def initializeDayTypeValues(
reader: Reader
): Map[DayType.Value, TypeDayParameters] = {
val parser = csvParser.parse(reader)
diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
index 0240da676b..caf74b066e 100644
--- a/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
+++ b/src/test/scala/edu/ie3/simona/model/participant/load/markov/MarkovParamStoreSpec.scala
@@ -1,51 +1,58 @@
-import edu.ie3.simona.model.participant.load.markov.MarkovParamStore
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.model.participant.load.markov
+
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class MarkovParamStoreSpec extends AnyFlatSpec with Matchers {
-  "dish_washer" should "return a map of Season_Day categories and their corresponding probabilities" in {
- val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
- probabilitiesMap shouldBe a[Map[_, _]]
- probabilitiesMap.size shouldEqual 12
- }
-
-  "usage_probabilities" should "return a map of appliance categories and their corresponding probabilities" in {
- val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
- probabilitiesMap shouldBe a[Map[_, _]]
- probabilitiesMap.size shouldEqual 12
- probabilitiesMap.getOrElse("other_load", 0.0) shouldEqual 1
- }
-
- "average_hh" should "return a map of appliances and their corresponding probabilities" in {
- val average_HHMap = MarkovParamStore.Average_HH()
- average_HHMap shouldBe a [Map[_, _]]
- average_HHMap.size shouldEqual 13
- average_HHMap.getOrElse("lighting", 0.0) shouldEqual 2.5
- }
-
- "by_type" should "return a map of appliances in a House or Flat and their corresponding probabilities" in {
- val TypeMap = MarkovParamStore.Type()
- TypeMap shouldBe a [scala.collection.mutable.Map[_, _]]
- TypeMap.size shouldEqual 2
- }
-
- "by_income" should "return a map of appliances per income and their corresponding probabilities" in {
- val incomeMap = MarkovParamStore.income()
- incomeMap shouldBe a [scala.collection.mutable.Map[_, _]]
- incomeMap.size shouldEqual 8
- }
-
- "by_inhabitants" should "return a map of appliances per inhabitants and their corresponding probabilities" in {
- val inhabitantsMap = MarkovParamStore.inhabitants()
- inhabitantsMap shouldBe a [scala.collection.mutable.Map[_, _]]
- inhabitantsMap.size shouldEqual 5
- }
-
- "load_ts" should "return a map of appliances and their corresponding Load Time Series" in {
- val load_TSMap = MarkovParamStore.load_TS()
- load_TSMap shouldBe a [scala.collection.mutable.Map[_, _]]
- load_TSMap.size shouldEqual 13
- }
+ "dish_washer" should "return a map of Season_Day Category's and their corresponding probabilities" in {
+ val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
+ probabilitiesMap shouldBe a[Map[_, _]]
+ probabilitiesMap.size shouldEqual 12
+ }
+
+ "usage_probabilities" should "return a map of appliance Category's and their corresponding probabilities" in {
+ val probabilitiesMap = MarkovParamStore.Usage_Probabilities()
+ probabilitiesMap shouldBe a[Map[_, _]]
+ probabilitiesMap.size shouldEqual 12
+ probabilitiesMap.getOrElse("other_load", 0.0) shouldEqual 1
+ }
+
+ "average_hh" should "return a map of appliances and their corresponding probabilities" in {
+ val average_HHMap = MarkovParamStore.Average_HH()
+ average_HHMap shouldBe a[Map[_, _]]
+ average_HHMap.size shouldEqual 13
+ average_HHMap.getOrElse("lighting", 0.0) shouldEqual 2.5
+ }
+
+ "by_type" should "return a map of appliances in a House or Flat and their corresponding probabilities" in {
+ val TypeMap = MarkovParamStore.Type()
+ TypeMap shouldBe a[scala.collection.mutable.Map[_, _]]
+ TypeMap.size shouldEqual 2
+ }
+
+ "by_income" should "return a map of appliances per income and their corresponding probabilities" in {
+ val incomeMap = MarkovParamStore.income()
+ incomeMap shouldBe a[scala.collection.mutable.Map[_, _]]
+ incomeMap.size shouldEqual 8
+ }
+ "by_inhabitants" should "return a map of appliances per inhabitants and their corresponding probabilities" in {
+ val inhabitantsMap = MarkovParamStore.inhabitants()
+ inhabitantsMap shouldBe a[scala.collection.mutable.Map[_, _]]
+ inhabitantsMap.size shouldEqual 5
}
+
+ "load_ts" should "return a map of appliances and their corresponding Load Time Series" in {
+ val load_TSMap = MarkovParamStore.load_TS()
+ load_TSMap shouldBe a[scala.collection.mutable.Map[_, _]]
+ load_TSMap.size shouldEqual 13
+ }
+
+}
From 416cf0d4635fa5b5c3be244b7cd18a04c9559a43 Mon Sep 17 00:00:00 2001
From: danielfeismann
Date: Thu, 23 May 2024 12:07:11 +0200
Subject: [PATCH 35/36] include MarkovAgent into LoadAgent
---
src/main/resources/config/config-template.conf | 2 +-
.../edu/ie3/simona/agent/participant/load/LoadAgent.scala | 3 +++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/main/resources/config/config-template.conf b/src/main/resources/config/config-template.conf
index 07591dc431..3851e3a200 100644
--- a/src/main/resources/config/config-template.conf
+++ b/src/main/resources/config/config-template.conf
@@ -24,7 +24,7 @@ BaseRuntimeConfig {
#@define extends BaseRuntimeConfig
LoadRuntimeConfig {
baseRuntimeConfig: BaseRuntimeConfig
- modelBehaviour: string # How the model behaves. Possible values: fix, profile, random
+ modelBehaviour: string # How the model behaves. Possible values: fix, profile, random, markov
reference: string # Scaling reference for the load model. Possible values: power, energy
}
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgent.scala
index 338547efbc..669901a16b 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/LoadAgent.scala
@@ -14,6 +14,7 @@ import edu.ie3.simona.agent.participant.load.LoadAgentFundamentals.{
ProfileLoadAgentFundamentals,
RandomLoadAgentFundamentals,
}
+import edu.ie3.simona.agent.participant.load.markov.MarkovAgent
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
@@ -47,6 +48,8 @@ object LoadAgent {
Props(new ProfileLoadAgent(scheduler, initStateData, listener))
case LoadModelBehaviour.RANDOM =>
Props(new RandomLoadAgent(scheduler, initStateData, listener))
+ case LoadModelBehaviour.MARKOV =>
+ Props(new MarkovAgent(scheduler, initStateData, listener))
case unsupported =>
throw new IllegalArgumentException(
s"The load agent behaviour '$unsupported' is currently not supported."
From a80c76c27984a9138cf36e3cd171afb674f1fc0b Mon Sep 17 00:00:00 2001
From: danielfeismann
Date: Thu, 23 May 2024 15:45:52 +0200
Subject: [PATCH 36/36] some more refactoring
---
.../participant/load/markov/MarkovAgent.scala | 8 +-
.../load/markov/MarkovAgentFundamentals.Scala | 307 --------------
.../load/markov/MarkovAgentFundamentals.scala | 383 ++++++++++++++++++
.../participant/load/markov/MarkovModel.scala | 43 +-
4 files changed, 423 insertions(+), 318 deletions(-)
delete mode 100644 src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
create mode 100644 src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.scala
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
index ea8b3edea8..5bffd87277 100644
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgent.scala
@@ -3,21 +3,20 @@
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
-/**
+
package edu.ie3.simona.agent.participant.load.markov
-import edu.ie3.simona.agent.participant.load.markov.MarkovAgentFundamentals
import edu.ie3.datamodel.models.input.system.LoadInput
import edu.ie3.datamodel.models.result.system.SystemParticipantResult
import edu.ie3.simona.agent.participant.ParticipantAgent
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
-import edu.ie3.simona.agent.participant.statedata.{BaseStateData, ParticipantStateData}
import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.ParticipantInitializeStateData
+import edu.ie3.simona.agent.participant.statedata.{BaseStateData, ParticipantStateData}
import edu.ie3.simona.agent.state.AgentState
import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
-import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
+import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
import edu.ie3.util.scala.quantities.ReactivePower
import org.apache.pekko.actor.{ActorRef, FSM, Props}
import squants.{Dimensionless, Power}
@@ -194,4 +193,3 @@ class MarkovAgent(
*/
override protected def buildResult(uuid: UUID, dateTime: ZonedDateTime, result: ApparentPower): SystemParticipantResult = ???
}
-*/
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
deleted file mode 100644
index b5d13d95ad..0000000000
--- a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.Scala
+++ /dev/null
@@ -1,307 +0,0 @@
-/*
- * © 2024. TU Dortmund University,
- * Institute of Energy Systems, Energy Efficiency and Energy Economics,
- * Research group Distribution grid planning and operation
- */
-/**
-package edu.ie3.simona.agent.participant.load.markov
-
-import edu.ie3.datamodel.models.input.system.LoadInput
-import edu.ie3.simona.agent.participant.ParticipantAgent.getAndCheckNodalVoltage
-import edu.ie3.simona.agent.participant.ParticipantAgentFundamentals
-import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{ApparentPower, ZERO_POWER}
-import edu.ie3.simona.agent.participant.data.Data.SecondaryData
-import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
-import edu.ie3.simona.agent.participant.statedata.BaseStateData.{FlexControlledData, ParticipantModelBaseStateData}
-import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
-import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.InputModelContainer
-import edu.ie3.simona.agent.state.AgentState
-import edu.ie3.simona.agent.state.AgentState.Idle
-import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
-import edu.ie3.simona.event.notifier.NotifierConfig
-import edu.ie3.simona.exceptions.agent.InconsistentStateException
-
-import edu.ie3.simona.model.participant.ModelState.ConstantState
-import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
-import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
-import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.{FlexResponse}
-
-import edu.ie3.util.quantities.QuantityUtils.RichQuantityDouble
-import edu.ie3.util.scala.quantities.ReactivePower
-import org.apache.pekko.actor.typed.{ActorRef => TypedActorRef}
-import org.apache.pekko.actor.{ActorRef, FSM}
-import squants.Dimensionless
-
-import java.time.ZonedDateTime
-import java.util.UUID
-import scala.reflect.{ClassTag, classTag}
-
-protected trait MarkovAgentFundamentals
- extends ParticipantAgentFundamentals[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- ParticipantStateData[ApparentPower],
- LoadInput,
- LoadRuntimeConfig,
- MarkovModel,
- ] {
- this: MarkovAgent =>
- override protected val pdClassTag: ClassTag[ApparentPower] =
- classTag[ApparentPower]
- override val alternativeResult: ApparentPower = ZERO_POWER
-
- /** Determines the needed base state data in dependence of the foreseen
- * simulation mode of the agent.
- *
- * @param inputModel
- * Input model definition
- * @param modelConfig
- * Configuration of the model
- * @param services
- * Collection of services to register with
- * @param simulationStartDate
- * Real world time date time, when the simulation starts
- * @param simulationEndDate
- * Real world time date time, when the simulation ends
- * @param resolution
- * Agents regular time bin it wants to be triggered e.g one hour
- * @param requestVoltageDeviationThreshold
- * Threshold, after which two nodal voltage magnitudes from participant
- * power requests for the same tick are considered to be different
- * @param outputConfig
- * Config of the output behaviour for simulation results
- * @return
- * A child of [[ParticipantModelBaseStateData]] that reflects the behaviour
- * based on the data source definition
- */
-
- override def determineModelBaseStateData(
- inputModel: InputModelContainer[LoadInput],
- modelConfig: LoadRuntimeConfig,
- services: Iterable[SecondaryDataService[_ <: SecondaryData]],
- simulationStartDate: ZonedDateTime,
- simulationEndDate: ZonedDateTime,
- resolution: Long,
- requestVoltageDeviationThreshold: Double,
- outputConfig: NotifierConfig,
- maybeEmAgent: Option[TypedActorRef[FlexResponse]],
- ): ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ] = {
- /* Build the calculation model */
- val model =
- buildModel(
- inputModel,
- modelConfig,
- simulationStartDate,
- simulationEndDate,
- )
-
-override protected def createInitialState(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ]
- ): ModelState.ConstantState.type =
- ConstantState
-
- override protected def createCalcRelevantData(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ],
- tick: Long,
- ): MarkovRelevantData = {
-
- }
-
- /** Handle an active power change by flex control.
- * @param tick
- * Tick, in which control is issued
- * @param baseStateData
- * Base state data of the agent
- * @param data
- * Calculation relevant data
- * @param lastState
- * Last known model state
- * @param setPower
- * Setpoint active power
- * @return
- * Updated model state, a result model and a [[FlexChangeIndicator]]
- */
- def handleControlledPowerChange(
- tick: Long,
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ],
- data: MarkovRelevantData,
- lastState: ConstantState.type,
- setPower: squants.Power,
- ): (ConstantState.type, ApparentPower, FlexChangeIndicator) = {
- /* Calculate result */
- val voltage = getAndCheckNodalVoltage(baseStateData, tick)
-
- val reactivePower = baseStateData.model.calculateReactivePower(
- setPower,
- voltage,
- )
- val result = ApparentPower(setPower, reactivePower)
-
- /* Handle the request within the model */
- val (updatedState, flexChangeIndicator) =
- baseStateData.model.handleControlledPowerChange(data, lastState, setPower)
- (updatedState, result, flexChangeIndicator)
- }
-
- /** Partial function, that is able to transfer
- * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
- * into a pair of active and reactive power
- */
-
-override val calculateModelPowerFunc: (
- Long,
- ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ],
- ConstantState.type,
- Dimensionless,
- ) => ApparentPower =
- (
- _: Long,
- _: ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ],
- _,
- _: Dimensionless,
- )
-
- /** Calculate the power output of the participant utilising secondary data.
- * However, it might appear, that not the complete set of secondary data is
- * available for the given tick. This might especially be true, if the actor
- * has been additionally activated. This method thereby has to try and fill
- * up missing data with the last known data, as this is still supposed to be
- * valid. The secondary data therefore is put to the calculation relevant
- * data store. The next state is [[Idle]], sending a
- * [[edu.ie3.simona.ontology.messages.SchedulerMessage.Completion]] to
- * scheduler and using update result values.
- *
- * @param baseStateData
- * The base state data with collected secondary data
- * @param lastModelState
- * Optional last model state
- * @param currentTick
- * Tick, the trigger belongs to
- * @param scheduler
- * [[ActorRef]] to the scheduler in the simulation
- * @return
- * [[Idle]] with updated result values
- */
- override def calculatePowerWithSecondaryDataAndGoToIdle(
- baseStateData: ParticipantModelBaseStateData[
- ApparentPower,
- MarkovRelevantData,
- ConstantState.type,
- MarkovModel,
- ],
- lastModelState: ConstantState.type,
- currentTick: Long,
- scheduler: ActorRef,
- ): FSM.State[AgentState, ParticipantStateData[ApparentPower]] =
- throw new InconsistentStateException(
- s"Markov model is not able to calculate power with secondary data."
- )
-
- /** Determine the average result within the given tick window
- *
- * @param tickToResults
- * Mapping from data tick to actual data
- * @param windowStart
- * First, included tick of the time window
- * @param windowEnd
- * Last, included tick of the time window
- * @param activeToReactivePowerFuncOpt
- * An Option on a function, that transfers the active into reactive power
- * @return
- * The averaged result
- */
- override def averageResults(
- tickToResults: Map[Long, ApparentPower],
- windowStart: Long,
- windowEnd: Long,
- activeToReactivePowerFuncOpt: Option[
- Power => ReactivePower
- ] = None,
- ): ApparentPower =
- ParticipantAgentFundamentals.averageApparentPower(
- tickToResults,
- windowStart,
- windowEnd,
- activeToReactivePowerFuncOpt,
- log,
- )
-
- /** Determines the correct result.
- *
- * @param uuid
- * Unique identifier of the physical model
- * @param dateTime
- * Real world date of the result
- * @param result
- * The primary data to build a result model for
- * @return
- * The equivalent event
- */
- override protected def buildResult(
- uuid: UUID,
- dateTime: ZonedDateTime,
- result: ApparentPower,
- ): SystemParticipantResult =
- new LoadResult(
- dateTime,
- uuid,
- result.p.toMegawatts.asMegaWatt,
- result.q.toMegavars.asMegaVar,
- )
-
- /** Update the last known model state with the given external, relevant data
- *
- * @param tick
- * Tick to update state for
- * @param modelState
- * Last known model state
- * @param calcRelevantData
- * Data, relevant for calculation
- * @param nodalVoltage
- * Current nodal voltage of the agent
- * @param model
- * Model for calculation
- * @return
- * The updated state at given tick under consideration of calculation
- * relevant data
- */
- override protected def updateState(
- tick: Long,
- modelState: ModelState.ConstantState.type,
- calcRelevantData: MarkovRelevantData,
- nodalVoltage: squants.Dimensionless,
- model: MarkovModel,
- ): ModelState.ConstantState.type = modelState
-}
-*/
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.scala b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.scala
new file mode 100644
index 0000000000..ed6ebdcdc5
--- /dev/null
+++ b/src/main/scala/edu/ie3/simona/agent/participant/load/markov/MarkovAgentFundamentals.scala
@@ -0,0 +1,383 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+ package edu.ie3.simona.agent.participant.load.markov
+
+ import edu.ie3.datamodel.models.input.system.LoadInput
+ import edu.ie3.datamodel.models.result.system.{LoadResult, SystemParticipantResult}
+ import edu.ie3.simona.agent.participant.ParticipantAgent.getAndCheckNodalVoltage
+ import edu.ie3.simona.agent.participant.ParticipantAgentFundamentals
+ import edu.ie3.simona.agent.participant.data.Data.PrimaryData.{ApparentPower, ZERO_POWER}
+ import edu.ie3.simona.agent.participant.data.Data.SecondaryData
+ import edu.ie3.simona.agent.participant.data.secondary.SecondaryDataService
+ import edu.ie3.simona.agent.participant.load.LoadAgent
+ import edu.ie3.simona.agent.participant.statedata.BaseStateData.{FlexControlledData, ParticipantModelBaseStateData}
+ import edu.ie3.simona.agent.participant.statedata.ParticipantStateData
+ import edu.ie3.simona.agent.participant.statedata.ParticipantStateData.InputModelContainer
+ import edu.ie3.simona.agent.state.AgentState
+ import edu.ie3.simona.agent.state.AgentState.Idle
+ import edu.ie3.simona.config.SimonaConfig.LoadRuntimeConfig
+ import edu.ie3.simona.event.notifier.NotifierConfig
+ import edu.ie3.simona.exceptions.agent.InconsistentStateException
+ import edu.ie3.simona.model.participant.ModelState.ConstantState
+ import edu.ie3.simona.model.participant.load.LoadReference
+ import edu.ie3.simona.model.participant.load.markov.MarkovModel.MarkovRelevantData
+ import edu.ie3.simona.model.participant.load.markov.{MarkovModel, MarkovRelevantData}
+ import edu.ie3.simona.model.participant.{FlexChangeIndicator, ModelState}
+ import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage.FlexResponse
+ import edu.ie3.simona.util.TickUtil.TickLong
+ import edu.ie3.util.quantities.QuantityUtils.RichQuantityDouble
+ import edu.ie3.util.scala.OperationInterval
+ import edu.ie3.util.scala.quantities.ReactivePower
+ import org.apache.pekko.actor.typed.{ActorRef => TypedActorRef}
+ import org.apache.pekko.actor.{ActorRef, FSM}
+ import squants.{Dimensionless, Power}
+
+ import java.time.ZonedDateTime
+ import java.util.UUID
+ import scala.reflect.{ClassTag, classTag}
+
+ protected trait MarkovAgentFundamentals
+ extends ParticipantAgentFundamentals[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ ParticipantStateData[ApparentPower],
+ LoadInput,
+ LoadRuntimeConfig,
+ MarkovModel,
+ ] {
+ this: MarkovAgent =>
+ override protected val pdClassTag: ClassTag[ApparentPower] =
+ classTag[ApparentPower]
+ override val alternativeResult: ApparentPower = ZERO_POWER
+
+ /** Determines the needed base state data in dependence of the foreseen
+ * simulation mode of the agent.
+ *
+ * @param inputModel
+ * Input model definition
+ * @param modelConfig
+ * Configuration of the model
+ * @param services
+ * Collection of services to register with
+ * @param simulationStartDate
+ * Real world time date time, when the simulation starts
+ * @param simulationEndDate
+ * Real world time date time, when the simulation ends
+ * @param resolution
+ * Agents regular time bin it wants to be triggered e.g one hour
+ * @param requestVoltageDeviationThreshold
+ * Threshold, after which two nodal voltage magnitudes from participant
+ * power requests for the same tick are considered to be different
+ * @param outputConfig
+ * Config of the output behaviour for simulation results
+ * @return
+ * A child of [[ParticipantModelBaseStateData]] that reflects the behaviour
+ * based on the data source definition
+ */
+
+ override def determineModelBaseStateData(
+ inputModel: InputModelContainer[LoadInput],
+ modelConfig: LoadRuntimeConfig,
+ services: Iterable[SecondaryDataService[_ <: SecondaryData]],
+ simulationStartDate: ZonedDateTime,
+ simulationEndDate: ZonedDateTime,
+ resolution: Long,
+ requestVoltageDeviationThreshold: Double,
+ outputConfig: NotifierConfig,
+ maybeEmAgent: Option[TypedActorRef[FlexResponse]],
+ ): ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ModelState.ConstantState.type,
+ MarkovModel,
+ ] = {
+ /* Build the calculation model */
+ val model =
+ buildModel(
+ inputModel,
+ modelConfig,
+ simulationStartDate,
+ simulationEndDate,
+ )
+
+ def createInitialState(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ MarkovModel,
+ ]
+ ): ModelState.ConstantState.type =
+ ConstantState
+
+ def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ tick: Long,
+ ): MarkovRelevantData = MarkovRelevantData(tick.toDateTime(baseStateData.startDate))
+
+ /** Handle an active power change by flex control.
+ * @param tick
+ * Tick, in which control is issued
+ * @param baseStateData
+ * Base state data of the agent
+ * @param data
+ * Calculation relevant data
+ * @param lastState
+ * Last known model state
+ * @param setPower
+ * Setpoint active power
+ * @return
+ * Updated model state, a result model and a [[FlexChangeIndicator]]
+ */
+ def handleControlledPowerChange(
+ tick: Long,
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ data: MarkovRelevantData,
+ lastState: ConstantState.type,
+ setPower: squants.Power,
+ ): (ConstantState.type, ApparentPower, FlexChangeIndicator) = {
+ /* Calculate result */
+ val voltage = getAndCheckNodalVoltage(baseStateData, tick)
+
+ val reactivePower = baseStateData.model.calculateReactivePower(
+ setPower,
+ voltage,
+ )
+ val result = ApparentPower(setPower, reactivePower)
+
+ /* Handle the request within the model */
+ val (updatedState, flexChangeIndicator) =
+ baseStateData.model.handleControlledPowerChange(data, lastState, setPower)
+ (updatedState, result, flexChangeIndicator)
+ }
+
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ val calculateModelPowerFunc: (
+ Long,
+ ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData.type,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ ConstantState.type,
+ Dimensionless,
+ ) => ApparentPower = (
+ tick: Long,
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData.type,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ state: ConstantState.type,
+ voltage: Dimensionless,
+ ) =>
+ baseStateData.model.calculatePower(
+ tick,
+ voltage,
+ state,
+ MarkovRelevantData,
+ )
+
+ /** Calculate the power output of the participant utilising secondary data.
+ * However, it might appear, that not the complete set of secondary data is
+ * available for the given tick. This might especially be true, if the actor
+ * has been additionally activated. This method thereby has to try and fill
+ * up missing data with the last known data, as this is still supposed to be
+ * valid. The secondary data therefore is put to the calculation relevant
+ * data store. The next state is [[Idle]], sending a
+ * [[edu.ie3.simona.ontology.messages.SchedulerMessage.Completion]] to
+ * scheduler and using update result values.
+ *
+ * @param baseStateData
+ * The base state data with collected secondary data
+ * @param lastModelState
+ * Optional last model state
+ * @param currentTick
+ * Tick, the trigger belongs to
+ * @param scheduler
+ * [[ActorRef]] to the scheduler in the simulation
+ * @return
+ * [[Idle]] with updated result values
+ */
+ def calculatePowerWithSecondaryDataAndGoToIdle(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ lastModelState: ConstantState.type,
+ currentTick: Long,
+ scheduler: ActorRef,
+ ): FSM.State[AgentState, ParticipantStateData[ApparentPower]] =
+ throw new InconsistentStateException(
+ s"Markov model is not able to calculate power with secondary data."
+ )
+
+ /** Determine the average result within the given tick window
+ *
+ * @param tickToResults
+ * Mapping from data tick to actual data
+ * @param windowStart
+ * First, included tick of the time window
+ * @param windowEnd
+ * Last, included tick of the time window
+ * @param activeToReactivePowerFuncOpt
+ * An Option on a function, that transfers the active into reactive power
+ * @return
+ * The averaged result
+ */
+ def averageResults(
+ tickToResults: Map[Long, ApparentPower],
+ windowStart: Long,
+ windowEnd: Long,
+ activeToReactivePowerFuncOpt: Option[
+ Power => ReactivePower
+ ] = None,
+ ): ApparentPower =
+ ParticipantAgentFundamentals.averageApparentPower(
+ tickToResults,
+ windowStart,
+ windowEnd,
+ activeToReactivePowerFuncOpt,
+ log,
+ )
+
+ /** Determines the correct result.
+ *
+ * @param uuid
+ * Unique identifier of the physical model
+ * @param dateTime
+ * Real world date of the result
+ * @param result
+ * The primary data to build a result model for
+ * @return
+ * The equivalent event
+ */
+ def buildResult(
+ uuid: UUID,
+ dateTime: ZonedDateTime,
+ result: ApparentPower,
+ ): SystemParticipantResult =
+ new LoadResult(
+ dateTime,
+ uuid,
+ result.p.toMegawatts.asMegaWatt,
+ result.q.toMegavars.asMegaVar,
+ )
+
+ /** Update the last known model state with the given external, relevant data
+ *
+ * @param tick
+ * Tick to update state for
+ * @param modelState
+ * Last known model state
+ * @param calcRelevantData
+ * Data, relevant for calculation
+ * @param nodalVoltage
+ * Current nodal voltage of the agent
+ * @param model
+ * Model for calculation
+ * @return
+ * The updated state at given tick under consideration of calculation
+ * relevant data
+ */
+ def updateState(
+ tick: Long,
+ modelState: ModelState.ConstantState.type,
+ calcRelevantData: MarkovRelevantData,
+ nodalVoltage: squants.Dimensionless,
+ model: MarkovModel,
+ ): ModelState.ConstantState.type = modelState
+ }
+ }
+
+ object MarkovAgentFundamentals {
+ def buildModel(
+ inputModel: LoadInput,
+ operationInterval: OperationInterval,
+ modelConfig: LoadRuntimeConfig,
+ reference: LoadReference,
+ ): MarkovModel =
+ MarkovModel(
+ inputModel,
+ modelConfig.scaling,
+ operationInterval,
+ reference,
+ )
+
+ protected def createCalcRelevantData(
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData.type,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ tick: Long,
+ ): MarkovRelevantData.type =
+ MarkovRelevantData
+
+ /** Partial function, that is able to transfer
+ * [[ParticipantModelBaseStateData]] (holding the actual calculation model)
+ * into a pair of active and reactive power
+ */
+ val calculateModelPowerFunc: (
+ Long,
+ ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData.type,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ ConstantState.type,
+ Dimensionless,
+ ) => ApparentPower = (
+ tick: Long,
+ baseStateData: ParticipantModelBaseStateData[
+ ApparentPower,
+ MarkovRelevantData.type,
+ ConstantState.type,
+ MarkovModel,
+ ],
+ state: ConstantState.type,
+ voltage: Dimensionless,
+ ) =>
+ baseStateData.model.calculatePower(
+ tick,
+ voltage,
+ state,
+ MarkovRelevantData,
+ )
+ }
diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
index a54e36d11e..e2f91bae01 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/markov/MarkovModel.scala
@@ -6,17 +6,16 @@
package edu.ie3.simona.model.participant.load.markov
+import edu.ie3.datamodel.models.input.system.LoadInput
import edu.ie3.simona.agent.participant.data.Data.PrimaryData.ApparentPower
+import edu.ie3.simona.model.participant.CalcRelevantData.LoadRelevantData
import edu.ie3.simona.model.participant.ModelState.ConstantState
import edu.ie3.simona.model.participant.control.QControl
+import edu.ie3.simona.model.participant.load.LoadReference
import edu.ie3.simona.model.participant.load.markov.MarkovParamStore._
-import edu.ie3.simona.model.participant.{
- CalcRelevantData,
- FlexChangeIndicator,
- ModelState,
- SystemParticipant,
-}
+import edu.ie3.simona.model.participant.{CalcRelevantData, FlexChangeIndicator, ModelState, SystemParticipant}
import edu.ie3.simona.ontology.messages.flex.FlexibilityMessage
+import edu.ie3.util.quantities.PowerSystemUnits
import edu.ie3.util.scala.OperationInterval
import squants.Dimensionless
import squants.energy.{Kilowatts, Power}
@@ -127,3 +126,35 @@ class MarkovRelevantData extends CalcRelevantData {
val by_Type_Map = Type()
val load_Ts_Map = load_TS()
}
+
+object MarkovModel {
+ case object MarkovRelevantData extends LoadRelevantData
+
+ def apply(
+ input: LoadInput,
+ scalingFactor: Double,
+ operationInterval: OperationInterval,
+ reference: LoadReference,
+ ): MarkovModel = {
+
+ val scaledInput = input.copy().scale(scalingFactor).build()
+
+ val model = MarkovModel(
+ scaledInput.getUuid,
+ scaledInput.getId,
+ operationInterval,
+ QControl(scaledInput.getqCharacteristics()),
+ Kilowatts(
+ scaledInput.getsRated
+ .to(PowerSystemUnits.KILOWATT)
+ .getValue
+ .doubleValue
+ ),
+ scaledInput.getCosPhiRated
+ )
+ model.enable()
+ model
+ }
+
+}
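A minimal wiring sketch for the new companion apply, for orientation only: the signature matches the hunk above, while loadInput, operationInterval and loadReference are placeholders here, since their construction depends on the surrounding agent initialisation.

  import edu.ie3.datamodel.models.input.system.LoadInput
  import edu.ie3.simona.model.participant.load.LoadReference
  import edu.ie3.simona.model.participant.load.markov.MarkovModel
  import edu.ie3.util.scala.OperationInterval

  // Placeholders: in SIMONA these come from the participant's initialisation data.
  val loadInput: LoadInput = ???
  val operationInterval: OperationInterval = ???
  val loadReference: LoadReference = ???

  // apply scales the input, builds the model and enables it before returning.
  val markovModel: MarkovModel = MarkovModel(loadInput, 1.0, operationInterval, loadReference)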