diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9a7f37e5..67c2194b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
+### Changed
+- Actor-based implementation for better concurrency
## [1.0.0] - 2021-08-03
### Added
diff --git a/build.gradle b/build.gradle
index adda2915..686db311 100644
--- a/build.gradle
+++ b/build.gradle
@@ -18,7 +18,7 @@ ext {
scalaVersion = '2.13'
scalaBinaryVersion = '2.13.6'
- tscfgVersion = '0.9.986'
+ tscfgVersion = '0.9.993'
slf4jVersion = '1.7.32'
scriptsLocation = 'gradle' + File.separator + 'scripts' + File.separator //location of script plugins
@@ -64,11 +64,11 @@ dependencies {
implementation 'tech.units:indriya:2.1.2'
// logging
- implementation 'org.apache.logging.log4j:log4j-api:+' // log4j
- implementation 'org.apache.logging.log4j:log4j-core:+' // log4j
- implementation 'org.apache.logging.log4j:log4j-slf4j-impl:+' // log4j -> slf4j
+ implementation 'org.apache.logging.log4j:log4j-api:2.14.1' // log4j
+ implementation 'org.apache.logging.log4j:log4j-core:2.14.1' // log4j
+ implementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' // log4j -> slf4j
- implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:+" // akka scala logging
+ implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:3.9.4" // akka scala logging
implementation "org.slf4j:log4j-over-slf4j:${slf4jVersion}" // slf4j -> log4j
// NEW scala libs //
@@ -82,21 +82,31 @@ dependencies {
testImplementation "org.pegdown:pegdown:1.6.0" // HTML report for scalatest
implementation 'org.mockito:mockito-core:3.11.2' // mocking framework
+ // akka actor system //
+ implementation platform("com.typesafe.akka:akka-bom_${scalaVersion}:2.6.14")
+ implementation "com.typesafe.akka:akka-actor-typed_${scalaVersion}"
+ implementation "com.typesafe.akka:akka-stream_${scalaVersion}"
+ testImplementation "com.typesafe.akka:akka-actor-testkit-typed_${scalaVersion}"
+
// config //
- implementation 'com.typesafe:config:+'
+ implementation 'com.typesafe:config:1.4.1'
implementation "com.github.carueda:tscfg_2.13:${tscfgVersion}"
// cmd args parser //
implementation "com.github.scopt:scopt_${scalaVersion}:+"
/* Handling compressed archives */
- implementation "org.apache.commons:commons-compress:+"
+ implementation "org.apache.commons:commons-compress:1.21"
}
wrapper {
- gradleVersion = '7.1.1'
+ gradleVersion = '7.2'
}
tasks.withType(JavaCompile) {
options.encoding = 'UTF-8'
}
+
+tasks.withType(Javadoc){
+ options.encoding = 'UTF-8'
+}
\ No newline at end of file
diff --git a/docs/diagrams/protocol/protocol.puml b/docs/diagrams/protocol/protocol.puml
new file mode 100644
index 00000000..6a84d690
--- /dev/null
+++ b/docs/diagrams/protocol/protocol.puml
@@ -0,0 +1,56 @@
+@startuml
+
+participant Main
+participant Coordinator
+participant Converter
+participant GridConverter
+participant ResConverter
+collections ResConverterWorker
+participant Mutator
+
+Main -> Coordinator: Start(Config)
+== Init ==
+Coordinator --[#blue]> Converter: spawn & watch
+Coordinator -> Converter: Init(...)
+Converter --[#blue]> Mutator: spawn & watch
+Converter -> Mutator: Init(Config)
+Converter <- Mutator: Ready
+Coordinator <- Converter: ConverterInitialized
+Coordinator -> Converter: Convert(String)
+
+== Converting grid structure ==
+Converter --[#blue]> GridConverter: spawn & watch
+Converter -> GridConverter: ConvertGridStructure(...)
+Converter <- GridConverter: GridStructureConverted(...)
+note left: Now all other entities\ncan be converted without\nconsidering islanded\nnodes
+
+== Converting participants ==
+Converter --[#blue]> ResConverter: spawn & watch
+Converter -> ResConverter: Init(...)
+ResConverter --[#blue]> ResConverterWorker: spawn & watch
+Converter <- ResConverter: ResConverterReady
+Converter -> ResConverter: Convert(...)
+ResConverter -> ResConverterWorker: Convert(...)
+ResConverterWorker -> Mutator: PersistTimeSeries(...)
+activate Mutator
+ResConverterWorker <- Mutator: TimeSeriesPersisted(...)
+ResConverter <- ResConverterWorker: Converted(...)
+ResConverter --[#blue]> ResConverterWorker: terminate
+ResConverter <--[#blue] ResConverterWorker
+Converter <- ResConverter: ResConverted(...)
+
+== Mutate Grid structure ==
+Converter -> Mutator: PersistGridStructure(...)
+Converter -> Mutator: PersistTimeSeriesMapping(...)
+Converter -> Mutator: PersistNodalResults(...)
+Converter <- Mutator: GridStructurePersisted(...)
+Converter <- Mutator: TimeSeriesMappingPersisted(...)
+Converter <- Mutator: NodalResultsPersisted(...)
+
+== Shutdown ==
+Converter -> Mutator: Shutdown
+Converter <- Mutator: Done
+deactivate Mutator
+
+Coordinator <- Converter: Converted(String)
+@enduml
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 62d4c053..7454180f 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 05679dc3..ffed3a25 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.1.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
index fbd7c515..744e882e 100755
--- a/gradlew
+++ b/gradlew
@@ -72,7 +72,7 @@ case "`uname`" in
Darwin* )
darwin=true
;;
- MINGW* )
+ MSYS* | MINGW* )
msys=true
;;
NONSTOP* )
@@ -130,7 +130,7 @@ fi
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
-
+
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
diff --git a/gradlew.bat b/gradlew.bat
index 5093609d..107acd32 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
+if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@@ -54,7 +54,7 @@ goto fail
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-if exist "%JAVA_EXE%" goto init
+if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
@@ -64,21 +64,6 @@ echo location of your Java installation.
goto fail
-:init
-@rem Get command-line arguments, handling Windows variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-
:execute
@rem Setup the command line
@@ -86,7 +71,7 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
diff --git a/src/main/resources/config-template.conf b/src/main/resources/config-template.conf
index 55a408d4..aa851f30 100644
--- a/src/main/resources/config-template.conf
+++ b/src/main/resources/config-template.conf
@@ -22,4 +22,5 @@ io {
}
conversion {
removeSwitches = "Boolean" | false
+ participantWorkersPerType = "Int" | 20
}
\ No newline at end of file
diff --git a/src/main/scala/edu/ie3/simbench/actor/Converter.scala b/src/main/scala/edu/ie3/simbench/actor/Converter.scala
new file mode 100644
index 00000000..99040892
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/Converter.scala
@@ -0,0 +1,751 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.ActorRef
+import akka.actor.typed.scaladsl.{ActorContext, Behaviors}
+import edu.ie3.datamodel.models.input.{MeasurementUnitInput, NodeInput}
+import edu.ie3.datamodel.models.input.connector.{
+ LineInput,
+ SwitchInput,
+ Transformer2WInput,
+ Transformer3WInput
+}
+import edu.ie3.datamodel.models.input.system.{FixedFeedInInput, LoadInput}
+import edu.ie3.datamodel.models.result.NodeResult
+import edu.ie3.simbench.exception.CodeValidationException
+import edu.ie3.simbench.io.{Downloader, SimbenchReader, Zipper}
+import edu.ie3.simbench.model.SimbenchCode
+import edu.ie3.simbench.model.datamodel.{GridModel, Node}
+import org.slf4j.Logger
+
+import java.nio.file.Path
+import java.util.UUID
+
+object Converter {
+ def apply(): Behaviors.Receive[ConverterMessage] = uninitialized
+
+ /**
+ * Receive information to set up myself accordingly
+ *
+ * @return Behavior to do so
+ */
+ def uninitialized: Behaviors.Receive[ConverterMessage] = Behaviors.receive {
+ case (
+ ctx,
+ Init(
+ simBenchCode,
+ baseUrl,
+ downloadTargetDirectory,
+ failDownloadOnExistingFiles,
+ inputFileEnding,
+ inputFileEncoding,
+ inputFileColumnSeparator,
+ removeSwitches,
+ amountOfWorkers,
+ useDirectoryHierarchy,
+ targetDirectory,
+ csvColumnSeparator,
+ compress,
+ converter
+ )
+ ) =>
+ ctx.log.info(s"$simBenchCode - Converter started.")
+
+      /* Initialize a mutator for this conversion and wait for its reply */
+ spawnMutator(
+ simBenchCode,
+ useDirectoryHierarchy,
+ targetDirectory,
+ csvColumnSeparator,
+ compress,
+ ctx
+ )
+ initializing(
+ simBenchCode,
+ baseUrl,
+ downloadTargetDirectory,
+ failDownloadOnExistingFiles,
+ inputFileEnding,
+ inputFileEncoding,
+ inputFileColumnSeparator,
+ removeSwitches,
+ amountOfWorkers,
+ converter
+ )
+ }
+
+ /**
+ * Receive needed information while being in the process of initialization
+ *
+ * @param simBenchCode SimBench code to handle later
+ * @param downloadTargetDirectory Target directory for downloads
+ * @param baseUrl Base url of the SimBench website
+ * @param failDownloadOnExistingFiles Whether or not to fail, if target files already exist
+ * @param inputFileEnding Ending of input files
+ * @param inputFileEncoding Encoding of the input files
+ * @param inputFileColumnSeparator Column separator of the input data
+ * @param removeSwitches Whether or not to remove switches in final model
+ * @param amountOfWorkers Amount of workers to convert participants
+ * @param coordinator Reference to the coordinator
+ * @return Behavior to do so
+ */
+ def initializing(
+ simBenchCode: String,
+ baseUrl: String,
+ downloadTargetDirectory: String,
+ failDownloadOnExistingFiles: Boolean,
+ inputFileEnding: String,
+ inputFileEncoding: String,
+ inputFileColumnSeparator: String,
+ removeSwitches: Boolean,
+ amountOfWorkers: Int,
+ coordinator: ActorRef[Coordinator.CoordinatorMessage]
+ ): Behaviors.Receive[ConverterMessage] = Behaviors.receive {
+ case (ctx, MutatorInitialized(mutator)) =>
+ ctx.log.debug(s"$simBenchCode - Mutator is ready.")
+ coordinator ! Coordinator.ConverterInitialized(simBenchCode, ctx.self)
+ idle(
+ StateData(
+ simBenchCode,
+ downloadTargetDirectory,
+ baseUrl,
+ failDownloadOnExistingFiles,
+ inputFileEnding,
+ inputFileEncoding,
+ inputFileColumnSeparator,
+ removeSwitches,
+ amountOfWorkers,
+ mutator,
+ coordinator
+ )
+ )
+ }
+
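+  /**
+    * Await the conversion request, download and read the SimBench data set and hand
+    * the grid structure over to a dedicated [[GridConverter]]
+    *
+    * @param stateData Current state of the converter
+    * @return Behavior to do so
+    */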
+ def idle(
+ stateData: StateData
+ ): Behaviors.Receive[ConverterMessage] = Behaviors.receive {
+ case (ctx, Convert(simBenchCode)) =>
+ ctx.log.info(
+ s"$simBenchCode - Downloading data set from SimBench website"
+ )
+ val downloadDirectory = downloadModel(
+ simBenchCode,
+ stateData.downloadDirectory,
+ stateData.baseUrl,
+ stateData.failOnExistingFiles
+ )
+ ctx.log.debug(s"$simBenchCode - Reading in the SimBench data set")
+ val simBenchModel = readModel(
+ simBenchCode,
+ downloadDirectory,
+ stateData.inputFileColumnSeparator,
+ stateData.inputFileEnding,
+ stateData.inputFileEncoding
+ )
+
+ /* Spawning a grid converter and ask it to do some first conversions */
+ ctx.log.info(s"$simBenchCode - Start conversion of grid structure")
+ val gridConverter =
+ ctx.spawn(GridConverter(), s"gridConverter_${stateData.simBenchCode}")
+ gridConverter ! GridConverter.ConvertGridStructure(
+ simBenchCode,
+ simBenchModel.nodes,
+ simBenchModel.nodePFResults,
+ simBenchModel.externalNets,
+ simBenchModel.powerPlants,
+ simBenchModel.res,
+ simBenchModel.transformers2w,
+ simBenchModel.transformers3w,
+ simBenchModel.lines,
+ simBenchModel.switches,
+ simBenchModel.measurements,
+ stateData.removeSwitches,
+ ctx.self
+ )
+ converting(stateData, simBenchModel, gridConverter)
+ }
+
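+  /**
+    * Collect the results of grid structure and participant conversion and trigger the
+    * finalization, as soon as everything that is awaited has arrived
+    *
+    * @param stateData Current state of the converter
+    * @param simBenchModel Raw SimBench input model
+    * @param gridConverter Reference to the grid converter
+    * @param awaitedResults Results that are still (partly) awaited
+    * @return Behavior to do so
+    */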
+ def converting(
+ stateData: StateData,
+ simBenchModel: GridModel,
+ gridConverter: ActorRef[GridConverter.GridConverterMessage],
+ awaitedResults: AwaitedResults = AwaitedResults.empty
+ ): Behaviors.Receive[ConverterMessage] = Behaviors.receive {
+ case (
+ ctx,
+ GridStructureConverted(
+ nodeConversion,
+ nodeResults,
+ lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ measurements
+ )
+ ) =>
+ ctx.log.debug(
+ s"${stateData.simBenchCode} - Grid structure has been converted."
+ )
+
+ ctx.log.info(
+ s"${stateData.simBenchCode} - Starting conversion of participant models"
+ )
+ if (simBenchModel.loads.nonEmpty) {
+ val loadConverter =
+ ctx.spawn(LoadConverter(), s"loadConverter_${stateData.simBenchCode}")
+ loadConverter ! LoadConverter.Init(
+ stateData.simBenchCode,
+ stateData.amountOfWorkers,
+ simBenchModel.loadProfiles,
+ stateData.mutator,
+ ctx.self
+ )
+ } else {
+ ctx.self ! LoadsConverted(Map.empty[LoadInput, UUID])
+ }
+ if (simBenchModel.res.nonEmpty) {
+ val resConverter =
+ ctx.spawn(ResConverter(), s"resConverter_${stateData.simBenchCode}")
+ resConverter ! ResConverter.Init(
+ stateData.simBenchCode,
+ stateData.amountOfWorkers,
+ simBenchModel.resProfiles,
+ stateData.mutator,
+ ctx.self
+ )
+ } else {
+ ctx.self ! ResConverted(Map.empty[FixedFeedInInput, UUID])
+ }
+ if (simBenchModel.powerPlants.nonEmpty) {
+ val powerPlantConverter =
+ ctx.spawn(
+ PowerPlantConverter(),
+ s"powerPlantConverter_${stateData.simBenchCode}"
+ )
+ powerPlantConverter ! PowerPlantConverter.Init(
+ stateData.simBenchCode,
+ stateData.amountOfWorkers,
+ simBenchModel.powerPlantProfiles,
+ stateData.mutator,
+ ctx.self
+ )
+ } else {
+ ctx.self ! PowerPlantsConverted(Map.empty[FixedFeedInInput, UUID])
+ }
+
+ converting(
+ stateData,
+ simBenchModel,
+ gridConverter,
+ awaitedResults.copy(
+ nodeConversion = Some(nodeConversion),
+ nodeResults = Some(nodeResults),
+ lines = Some(lines),
+ transformers2w = Some(transformers2w),
+ transformers3w = Some(transformers3w),
+ switches = Some(switches),
+ measurements = Some(measurements)
+ )
+ )
+
+ case (ctx, LoadConverterReady(loadConverter)) =>
+ ctx.log.debug(
+ s"${stateData.simBenchCode} - LoadConverter is ready. Request conversion."
+ )
+ loadConverter ! LoadConverter.Convert(
+ stateData.simBenchCode,
+ simBenchModel.loads,
+ awaitedResults.nodeConversion.getOrElse(Map.empty),
+ ctx.self
+ )
+ Behaviors.same
+
+ case (ctx, ResConverterReady(resConverter)) =>
+ ctx.log.debug(
+ s"${stateData.simBenchCode} - ResConverter is ready. Request conversion."
+ )
+ resConverter ! ResConverter.Convert(
+ stateData.simBenchCode,
+ simBenchModel.res,
+ awaitedResults.nodeConversion.getOrElse(Map.empty),
+ ctx.self
+ )
+ Behaviors.same
+
+ case (ctx, PowerPlantConverterReady(powerPlantConverter)) =>
+ ctx.log.debug(
+ s"${stateData.simBenchCode} - PowerPlantConverter is ready. Request conversion."
+ )
+ powerPlantConverter ! PowerPlantConverter.Convert(
+ stateData.simBenchCode,
+ simBenchModel.powerPlants,
+ awaitedResults.nodeConversion.getOrElse(Map.empty),
+ ctx.self
+ )
+ Behaviors.same
+
+ case (ctx, LoadsConverted(converted)) =>
+ ctx.log.info(
+ s"${stateData.simBenchCode} - All loads are converted."
+ )
+ val updatedAwaitedResults = awaitedResults.copy(loads = Some(converted))
+
+ if (updatedAwaitedResults.isReady)
+ finalizeConversion(
+ stateData.simBenchCode,
+ updatedAwaitedResults,
+ stateData.mutator,
+ stateData.coordinator,
+ ctx.self,
+ ctx.log
+ )
+ else
+ converting(
+ stateData,
+ simBenchModel,
+ gridConverter,
+ updatedAwaitedResults
+ )
+
+ case (ctx, ResConverted(converted)) =>
+ ctx.log.info(
+ s"${stateData.simBenchCode} - All RES are converted."
+ )
+ val updatedAwaitedResults = awaitedResults.copy(res = Some(converted))
+
+ if (updatedAwaitedResults.isReady)
+ finalizeConversion(
+ stateData.simBenchCode,
+ updatedAwaitedResults,
+ stateData.mutator,
+ stateData.coordinator,
+ ctx.self,
+ ctx.log
+ )
+ else
+ converting(
+ stateData,
+ simBenchModel,
+ gridConverter,
+ updatedAwaitedResults
+ )
+
+ case (ctx, PowerPlantsConverted(converted)) =>
+ ctx.log.info(
+ s"${stateData.simBenchCode} - All power plants are converted."
+ )
+ val updatedAwaitedResults =
+ awaitedResults.copy(powerPlants = Some(converted))
+
+ if (updatedAwaitedResults.isReady)
+ finalizeConversion(
+ stateData.simBenchCode,
+ updatedAwaitedResults,
+ stateData.mutator,
+ stateData.coordinator,
+ ctx.self,
+ ctx.log
+ )
+ else
+ converting(
+ stateData,
+ simBenchModel,
+ gridConverter,
+ updatedAwaitedResults
+ )
+ }
+
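+  /**
+    * Await the confirmations, that grid structure, nodal results and time series mapping are
+    * persisted, terminate the [[Mutator]] afterwards and stop, once its termination is observed
+    *
+    * @return Behavior to do so
+    */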
+ def finalizing(
+ simBenchCode: String,
+ mutator: ActorRef[Mutator.MutatorMessage],
+ coordinator: ActorRef[Coordinator.CoordinatorMessage],
+ gridStructurePersisted: Boolean = false,
+ nodeResultsPersisted: Boolean = false,
+ timeSeriesMappingPersisted: Boolean = false
+ ): Behaviors.Receive[ConverterMessage] = Behaviors.receive {
+ case (ctx, MutatorTerminated) =>
+ ctx.log.debug(s"$simBenchCode - Mutator has terminated.")
+ ctx.log.info(
+ s"$simBenchCode - Shut down converter."
+ )
+ coordinator ! Coordinator.Converted(simBenchCode)
+ Behaviors.stopped
+
+ case (ctx, message) =>
+ val (
+ updatedGridStructurePersisted,
+ updatedNodeResultsPersisted,
+ updatedTimeSeriesMappingPersisted
+ ) = message match {
+ case GridStructurePersisted =>
+ ctx.log.debug(s"$simBenchCode - Grid structure is persisted")
+ (
+ true,
+ nodeResultsPersisted,
+ timeSeriesMappingPersisted
+ )
+ case NodalResultsPersisted =>
+ ctx.log.debug(s"$simBenchCode - Node results are persisted")
+ (
+ gridStructurePersisted,
+ true,
+ timeSeriesMappingPersisted
+ )
+ case TimeSeriesMappingPersisted =>
+ ctx.log.debug(s"$simBenchCode - Time series mapping is persisted")
+ (
+ gridStructurePersisted,
+ nodeResultsPersisted,
+ true
+ )
+ case unexpected =>
+ ctx.log.warn(
+ s"$simBenchCode - Received unexpected message '$unexpected'."
+ )
+ (
+ gridStructurePersisted,
+ nodeResultsPersisted,
+ timeSeriesMappingPersisted
+ )
+ }
+ if (updatedGridStructurePersisted && updatedNodeResultsPersisted && updatedTimeSeriesMappingPersisted) {
+ ctx.log.debug(
+ s"$simBenchCode - All models are persisted. Shut down mutator."
+ )
+ mutator ! Mutator.Terminate
+ }
+ finalizing(
+ simBenchCode,
+ mutator,
+ coordinator,
+ updatedGridStructurePersisted,
+ updatedNodeResultsPersisted,
+ updatedTimeSeriesMappingPersisted
+ )
+ }
+
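+  /**
+    * Spawn and initialize a [[Mutator]] for this conversion and watch its termination
+    */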
+ private def spawnMutator(
+ simBenchCode: String,
+ useDirectoryHierarchy: Boolean,
+ targetDirectory: String,
+ csvColumnSeparator: String,
+ compress: Boolean,
+ ctx: ActorContext[ConverterMessage]
+ ): Unit = {
+ val mutator = ctx.spawn(Mutator(), s"mutator_$simBenchCode")
+ mutator ! Mutator.Init(
+ simBenchCode,
+ useDirectoryHierarchy,
+ targetDirectory,
+ csvColumnSeparator,
+ compress,
+ ctx.self
+ )
+ ctx.watchWith(mutator, MutatorTerminated)
+ }
+
+ /**
+ * Downloads the specified model and hands back the path, where the extracted files are located
+ *
+ * @param simBenchCode Code, that denotes the target SimBench model
+ * @param downloadDirectory Target directory for downloads
+ * @param baseUrl Base url of the SimBench website
+ * @param failOnExistingFiles Whether or not to fail, if target files already exist
+ * @return The path to the extracted data
+ */
+ private def downloadModel(
+ simBenchCode: String,
+ downloadDirectory: String,
+ baseUrl: String,
+ failOnExistingFiles: Boolean
+ ): Path = {
+ val downloader =
+ Downloader(
+ downloadDirectory,
+ baseUrl,
+ failOnExistingFiles
+ )
+ val downloadedFile =
+ downloader.download(
+ SimbenchCode(simBenchCode).getOrElse(
+ throw CodeValidationException(
+ s"'$simBenchCode' is no valid SimBench code."
+ )
+ )
+ )
+ Zipper.unzip(
+ downloadedFile,
+ downloader.downloadFolder,
+ failOnExistingFiles,
+ flattenDirectories = true
+ )
+ }
+
+ /**
+ * Read in the raw SimBench grid model
+ *
+ * @param simBenchCode Code of model to convert
+ * @param downloadDirectory Directory, where the files are downloaded to
+ * @param csvColumnSeparator Column separator of input files
+ * @param fileEnding Ending of input files
+ * @param fileEncoding Encoding of the file
+ * @return The raw SimBench model
+ */
+ private def readModel(
+ simBenchCode: String,
+ downloadDirectory: Path,
+ csvColumnSeparator: String,
+ fileEnding: String,
+ fileEncoding: String
+ ): GridModel = {
+ val simBenchReader = SimbenchReader(
+ simBenchCode,
+ downloadDirectory,
+ csvColumnSeparator,
+ fileEnding,
+ fileEncoding
+ )
+ simBenchReader.readGrid()
+ }
+
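+  /**
+    * Hand all collected results over to the [[Mutator]] for persistence and switch to the
+    * finalizing behavior
+    */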
+ private def finalizeConversion(
+ simBenchCode: String,
+ awaitedResults: AwaitedResults,
+ mutator: ActorRef[Mutator.MutatorMessage],
+ coordinator: ActorRef[Coordinator.CoordinatorMessage],
+ self: ActorRef[ConverterMessage],
+ logger: Logger
+ ): Behaviors.Receive[ConverterMessage] = {
+ logger.info(
+      s"$simBenchCode - Persisting grid structure and associated participants."
+ )
+ /* Bring together all results and send them to the mutator */
+ mutator ! Mutator.PersistGridStructure(
+ simBenchCode,
+ awaitedResults.nodeConversion.map(_.values.toSet).getOrElse(Set.empty),
+ awaitedResults.lines.map(_.toSet).getOrElse {
+ logger.warn("Model does not contain lines.")
+ Set.empty
+ },
+ awaitedResults.transformers2w.map(_.toSet).getOrElse {
+ logger.warn("Model does not contain two winding transformers.")
+ Set.empty
+ },
+ awaitedResults.transformers3w.map(_.toSet).getOrElse {
+ logger.debug("Model does not contain three winding transformers.")
+ Set.empty
+ },
+ awaitedResults.switches.map(_.toSet).getOrElse {
+ logger.debug("Model does not contain switches.")
+ Set.empty
+ },
+ awaitedResults.measurements.map(_.toSet).getOrElse {
+ logger.debug("Model does not contain measurements.")
+ Set.empty
+ },
+ awaitedResults.loads.map(_.keys.toSet).getOrElse {
+ logger.debug("Model does not contain loads.")
+ Set.empty
+ },
+ (awaitedResults.res.map(_.keys).getOrElse {
+ logger.debug("Model does not contain renewable energy sources.")
+ Seq.empty[FixedFeedInInput]
+ } ++ awaitedResults.powerPlants.map(_.keys).getOrElse {
+ logger.debug("Model does not contain power plants.")
+ Seq.empty[FixedFeedInInput]
+ }).toSet,
+ self
+ )
+
+ /* Build the time series mapping and send it to the mutator */
+ val timeSeriesMapping: Map[UUID, UUID] = awaitedResults.loads
+ .map(
+ modelToTimeSeries =>
+ modelToTimeSeries.map {
+ case (model, uuid) => model.getUuid -> uuid
+ }
+ )
+ .getOrElse {
+ logger.warn("The model does not contain time series for loads.")
+ Map.empty[UUID, UUID]
+ } ++ awaitedResults.res
+ .map(
+ modelToTimeSeries =>
+ modelToTimeSeries.map {
+ case (model, uuid) => model.getUuid -> uuid
+ }
+ )
+ .getOrElse {
+ logger.warn(
+ "The model does not contain time series for renewable energy sources."
+ )
+ Map.empty[UUID, UUID]
+ } ++ awaitedResults.powerPlants
+ .map(
+ modelToTimeSeries =>
+ modelToTimeSeries.map {
+ case (model, uuid) => model.getUuid -> uuid
+ }
+ )
+ .getOrElse {
+ logger.warn("The model does not contain time series for power plants.")
+ Map.empty[UUID, UUID]
+ }
+
+ mutator ! Mutator.PersistTimeSeriesMapping(timeSeriesMapping, self)
+
+ /* Persist the nodal results */
+ mutator ! Mutator.PersistNodalResults(awaitedResults.nodeResults.getOrElse {
+ logger.warn("The model does not contain nodal results.")
+ Set.empty[NodeResult]
+ }.toSet, self)
+
+ finalizing(simBenchCode, mutator, coordinator)
+ }
+
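+  /**
+    * State information of an initialized converter
+    */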
+ final case class StateData(
+ simBenchCode: String,
+ downloadDirectory: String,
+ baseUrl: String,
+ failOnExistingFiles: Boolean,
+ inputFileEnding: String,
+ inputFileEncoding: String,
+ inputFileColumnSeparator: String,
+ removeSwitches: Boolean,
+ amountOfWorkers: Int,
+ mutator: ActorRef[Mutator.MutatorMessage],
+ coordinator: ActorRef[Coordinator.CoordinatorMessage]
+ )
+
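+  /**
+    * Container for all results, that need to be collected before the conversion can be finalized
+    */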
+ final case class AwaitedResults(
+ nodeConversion: Option[Map[Node, NodeInput]],
+ nodeResults: Option[Vector[NodeResult]],
+ lines: Option[Vector[LineInput]],
+ transformers2w: Option[Vector[Transformer2WInput]],
+ transformers3w: Option[Vector[Transformer3WInput]],
+ switches: Option[Vector[SwitchInput]],
+ measurements: Option[Vector[MeasurementUnitInput]],
+ loads: Option[Map[LoadInput, UUID]],
+ res: Option[Map[FixedFeedInInput, UUID]],
+ powerPlants: Option[Map[FixedFeedInInput, UUID]]
+ ) {
+
+ /**
+ * Check, if all awaited results are there
+ *
+ * @return true, if nothing is missing
+ */
+ def isReady: Boolean =
+ Seq(
+ nodeConversion,
+ nodeResults,
+ lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ measurements,
+ loads,
+ res,
+ powerPlants
+ ).forall(_.nonEmpty)
+ }
+ object AwaitedResults {
+ def empty =
+ new AwaitedResults(
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None
+ )
+ }
+
+ /** Messages, a converter will understand */
+ sealed trait ConverterMessage
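+  /**
+    * Request to set up the converter for the given SimBench model
+    */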
+ final case class Init(
+ simBenchCode: String,
+ baseUrl: String,
+ downloadTargetDirectory: String,
+ failDownloadOnExistingFiles: Boolean,
+ inputFileEnding: String,
+ inputFileEncoding: String,
+ inputFileColumnSeparator: String,
+ removeSwitches: Boolean,
+ amountOfWorkers: Int,
+ useDirectoryHierarchy: Boolean,
+ targetDirectory: String,
+ csvColumnSeparator: String,
+ compressConverted: Boolean,
+ replyTo: ActorRef[Coordinator.CoordinatorMessage]
+ ) extends ConverterMessage
+
+ /**
+ * Report, that the attached [[Mutator]] is ready for action
+ *
+ * @param replyTo Reference to the mutator
+ */
+ final case class MutatorInitialized(replyTo: ActorRef[Mutator.MutatorMessage])
+ extends ConverterMessage
+
+ /**
+ * Report, that the [[Mutator]] has terminated
+ */
+ object MutatorTerminated extends ConverterMessage
+
+ /**
+ * Request to convert a certain SimBench model
+ *
+ * @param simBenchCode Code that denotes the model
+ */
+ final case class Convert(simBenchCode: String) extends ConverterMessage
+
+ /**
+    * Feedback, that the grid structure has been converted
+ *
+ * @param nodeConversion The conversion of nodes
+ * @param nodeResults Node results
+ * @param lines Lines
+    * @param transformers2w Two winding transformers
+ * @param transformers3w Three winding transformers
+ * @param switches Switches
+ * @param measurements Measurement devices
+ */
+ final case class GridStructureConverted(
+ nodeConversion: Map[Node, NodeInput],
+ nodeResults: Vector[NodeResult],
+ lines: Vector[LineInput],
+ transformers2w: Vector[Transformer2WInput],
+ transformers3w: Vector[Transformer3WInput],
+ switches: Vector[SwitchInput],
+ measurements: Vector[MeasurementUnitInput]
+ ) extends ConverterMessage
+
+ final case class ResConverterReady(
+ replyTo: ActorRef[ResConverter.ShuntConverterMessage]
+ ) extends ConverterMessage
+
+ final case class LoadConverterReady(
+ replyTo: ActorRef[LoadConverter.ShuntConverterMessage]
+ ) extends ConverterMessage
+
+ final case class PowerPlantConverterReady(
+ replyTo: ActorRef[PowerPlantConverter.ShuntConverterMessage]
+ ) extends ConverterMessage
+
+ final case class ResConverted(converted: Map[FixedFeedInInput, UUID])
+ extends ConverterMessage
+
+ final case class LoadsConverted(converted: Map[LoadInput, UUID])
+ extends ConverterMessage
+
+ final case class PowerPlantsConverted(converted: Map[FixedFeedInInput, UUID])
+ extends ConverterMessage
+
+ object GridStructurePersisted extends ConverterMessage
+ object TimeSeriesMappingPersisted extends ConverterMessage
+ object NodalResultsPersisted extends ConverterMessage
+}
diff --git a/src/main/scala/edu/ie3/simbench/actor/Coordinator.scala b/src/main/scala/edu/ie3/simbench/actor/Coordinator.scala
new file mode 100644
index 00000000..00942182
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/Coordinator.scala
@@ -0,0 +1,205 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.ActorRef
+import akka.actor.typed.scaladsl.{ActorContext, Behaviors}
+import edu.ie3.simbench.config.SimbenchConfig
+
+object Coordinator {
+ def apply(): Behaviors.Receive[CoordinatorMessage] = uninitialized
+
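+  /**
+    * Await the start command, spawn a first [[Converter]] and set up the coordinator's state
+    */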
+ def uninitialized: Behaviors.Receive[CoordinatorMessage] = Behaviors.receive {
+ case (ctx, Start(config)) =>
+ ctx.log.info("Starting conversion with given config.")
+ config.io.simbenchCodes.headOption match {
+ case Some(firstSimBenchCode) =>
+ val remainingCodes =
+ config.io.simbenchCodes.filter(_ != firstSimBenchCode)
+
+ /* Start a converter and issue the conversion */
+ spawnConverter(
+ ctx,
+ firstSimBenchCode,
+ config.io.input.download.baseUrl,
+ config.io.input.download.folder,
+ config.io.input.download.failOnExistingFiles,
+ config.io.input.csv.fileEnding,
+ config.io.input.csv.fileEncoding,
+ config.io.input.csv.separator,
+ config.conversion.removeSwitches,
+ config.conversion.participantWorkersPerType,
+ config.io.output.csv.directoryHierarchy,
+ config.io.output.targetFolder,
+ config.io.output.csv.separator,
+ config.io.output.compress
+ )
+ val initializingConverters = List(firstSimBenchCode)
+
+ val stateData = StateData(
+ remainingCodes,
+ initializingConverters,
+ List.empty[String],
+ config.io.input.download.baseUrl,
+ config.io.input.download.folder,
+ config.io.input.download.failOnExistingFiles,
+ config.io.input.csv.fileEnding,
+ config.io.input.csv.fileEncoding,
+ config.io.input.csv.separator,
+ config.conversion.removeSwitches,
+ config.conversion.participantWorkersPerType,
+ config.io.output.csv.directoryHierarchy,
+ config.io.output.targetFolder,
+ config.io.output.csv.separator,
+ config.io.output.compress
+ )
+
+ idle(stateData)
+ case None =>
+ ctx.log.error("No models to convert. Shut down.")
+ Behaviors.stopped
+ }
+ }
+
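+  /**
+    * Keep track of initializing and active converters, issue conversion requests and spawn a
+    * new converter, whenever a previous one has finished
+    *
+    * @param stateData Current state of the coordinator
+    * @return Behavior to do so
+    */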
+ def idle(
+ stateData: StateData
+ ): Behaviors.Receive[CoordinatorMessage] = Behaviors.receive {
+ case (ctx, ConverterInitialized(simBenchCode, replyTo)) =>
+      ctx.log.debug(s"Ready to convert SimBench model '$simBenchCode'.")
+ replyTo ! Converter.Convert(simBenchCode)
+
+ val stillInitializingConverters =
+ stateData.initializingConverters.filterNot(_ == simBenchCode)
+ val yetActiveConverters = stateData.activeConverters :+ simBenchCode
+ idle(
+ stateData.copy(
+ initializingConverters = stillInitializingConverters,
+ activeConverters = yetActiveConverters
+ )
+ )
+ case (ctx, Converted(simBenchCode)) =>
+ /* A converter has completed. Report that and start a new converter. */
+ ctx.log.info(
+ s"$simBenchCode - Completely converted."
+ )
+ val stillActiveConverters =
+ stateData.activeConverters.filterNot(_ == simBenchCode)
+ stateData.simBenchCodes.headOption match {
+ case Some(nextSimBenchCode) =>
+ /* Spawn a converter and wait until it is ready */
+ val remainingCodes =
+ stateData.simBenchCodes.filter(_ != nextSimBenchCode)
+ spawnConverter(
+ ctx,
+ nextSimBenchCode,
+ stateData.baseUrl,
+ stateData.downloadTargetDirectory,
+ stateData.failDownloadOnExistingFiles,
+ stateData.inputFileEnding,
+ stateData.inputFileEncoding,
+ stateData.inputFileColumnSeparator,
+ stateData.removeSwitches,
+ stateData.amountOfWorkers,
+ stateData.useDirectoryHierarchy,
+ stateData.targetDirectory,
+ stateData.csvColumnSeparator,
+ stateData.compressConverted
+ )
+ val yetInitializingConverters = stateData.initializingConverters :+ nextSimBenchCode
+ idle(
+ stateData.copy(
+ simBenchCodes = remainingCodes,
+ activeConverters = stillActiveConverters,
+ initializingConverters = yetInitializingConverters
+ )
+ )
+ case None if stillActiveConverters.nonEmpty =>
+ ctx.log.debug(
+ s"Waiting for last ${stillActiveConverters.size} active converter(s)."
+ )
+          /* Keep track of the now finished converter */
+          idle(stateData.copy(activeConverters = stillActiveConverters))
+ case None =>
+ ctx.log.info("All models have been converted. Shut down.")
+ Behaviors.stopped
+ }
+ }
+
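+  /**
+    * Spawn a [[Converter]] for the given SimBench code and initialize it
+    */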
+ def spawnConverter(
+ ctx: ActorContext[CoordinatorMessage],
+ simBenchCode: String,
+ baseUrl: String,
+ downloadTargetDirectory: String,
+ failDownloadOnExistingFiles: Boolean,
+ inputFileEnding: String,
+ inputFileEncoding: String,
+ inputFileColumnSeparator: String,
+ removeSwitches: Boolean,
+ amountOfWorkers: Int,
+ useDirectoryHierarchy: Boolean,
+ targetDirectory: String,
+ csvColumnSeparator: String,
+ compressConverted: Boolean
+ ): Unit = {
+ val converter = ctx.spawn(Converter(), s"converter_$simBenchCode")
+ converter ! Converter.Init(
+ simBenchCode,
+ baseUrl,
+ downloadTargetDirectory,
+ failDownloadOnExistingFiles,
+ inputFileEnding,
+ inputFileEncoding,
+ inputFileColumnSeparator,
+ removeSwitches,
+ amountOfWorkers,
+ useDirectoryHierarchy,
+ targetDirectory,
+ csvColumnSeparator,
+ compressConverted,
+ ctx.self
+ )
+ }
+
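+  /**
+    * State information of the coordinator
+    */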
+ final case class StateData(
+ simBenchCodes: List[String],
+ initializingConverters: List[String],
+ activeConverters: List[String],
+ baseUrl: String,
+ downloadTargetDirectory: String,
+ failDownloadOnExistingFiles: Boolean,
+ inputFileEnding: String,
+ inputFileEncoding: String,
+ inputFileColumnSeparator: String,
+ removeSwitches: Boolean,
+ amountOfWorkers: Int,
+ useDirectoryHierarchy: Boolean,
+ targetDirectory: String,
+ csvColumnSeparator: String,
+ compressConverted: Boolean
+ )
+
+ /** Messages, a coordinator will understand */
+ sealed trait CoordinatorMessage
+
+ /**
+ * Request to start conversion
+ *
+ * @param config The config to use for conversion
+ */
+ final case class Start(config: SimbenchConfig) extends CoordinatorMessage
+
+ /**
+ * Reporting, that a certain converter is ready for action
+ *
+ * @param simBenchCode Code of the SimBench model it will handle
+ * @param replyTo Reference to where to reach the converter
+ */
+ final case class ConverterInitialized(
+ simBenchCode: String,
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends CoordinatorMessage
+
+ /**
+ * Back-reporting, that a given SimBench-code has been handled
+ *
+    * @param simBenchCode Handled SimBench code
+ */
+ final case class Converted(simBenchCode: String) extends CoordinatorMessage
+}
diff --git a/src/main/scala/edu/ie3/simbench/convert/GridConverter.scala b/src/main/scala/edu/ie3/simbench/actor/GridConverter.scala
similarity index 60%
rename from src/main/scala/edu/ie3/simbench/convert/GridConverter.scala
rename to src/main/scala/edu/ie3/simbench/actor/GridConverter.scala
index 4cb5540f..00f07fea 100644
--- a/src/main/scala/edu/ie3/simbench/convert/GridConverter.scala
+++ b/src/main/scala/edu/ie3/simbench/actor/GridConverter.scala
@@ -1,29 +1,16 @@
-package edu.ie3.simbench.convert
+package edu.ie3.simbench.actor
+import akka.actor.typed.ActorRef
+import akka.actor.typed.scaladsl.Behaviors
import com.typesafe.scalalogging.LazyLogging
-import edu.ie3.datamodel.io.source.TimeSeriesMappingSource
-import edu.ie3.datamodel.io.source.TimeSeriesMappingSource.MappingEntry
+import edu.ie3.datamodel.models.input.NodeInput
import edu.ie3.datamodel.models.input.connector.{
LineInput,
SwitchInput,
Transformer2WInput,
Transformer3WInput
}
-import edu.ie3.datamodel.models.input.container.{
- GraphicElements,
- JointGridContainer,
- RawGridElements,
- SystemParticipants
-}
-import edu.ie3.datamodel.models.input.graphics.{
- LineGraphicInput,
- NodeGraphicInput
-}
-import edu.ie3.datamodel.models.input.system._
-import edu.ie3.datamodel.models.input.NodeInput
import edu.ie3.datamodel.models.result.NodeResult
-import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
-import edu.ie3.datamodel.models.value.{PValue, SValue, Value}
import edu.ie3.simbench.convert.NodeConverter.AttributeOverride.{
JoinOverride,
SubnetOverride
@@ -32,155 +19,162 @@ import edu.ie3.simbench.convert.types.{
LineTypeConverter,
Transformer2wTypeConverter
}
+import edu.ie3.simbench.convert._
import edu.ie3.simbench.exception.ConversionException
-import edu.ie3.simbench.model.datamodel.{
- GridModel,
- Line,
- Node,
- NodePFResult,
- Switch,
- Transformer2W,
- Transformer3W
-}
+import edu.ie3.simbench.model.datamodel._
+import edu.ie3.simbench.model.datamodel.types.LineType
-import java.util.UUID
import scala.annotation.tailrec
import scala.jdk.CollectionConverters._
import scala.collection.parallel.CollectionConverters._
case object GridConverter extends LazyLogging {
+ def apply(): Behaviors.Receive[GridConverterMessage] = idle
+
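+  /**
+    * Convert the grid structure of a SimBench model on request, filter out islanded nodes and
+    * report the outcome back to the requesting [[Converter]]
+    */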
+ def idle: Behaviors.Receive[GridConverterMessage] = Behaviors.receive {
+ case (
+ ctx,
+ ConvertGridStructure(
+ simBenchCode,
+ nodes,
+ nodeResults,
+ externalNets,
+ powerPlants,
+ res,
+ transformers2w,
+ transformers3w,
+ lines,
+ switches,
+ measurements,
+ removeSwitches,
+ converter
+ )
+ ) =>
+ ctx.log.debug(
+ s"Got asked to convert the grid structure of SimBench model '$simBenchCode'."
+ )
+ val nodeConversion = convertNodes(
+ nodes,
+ externalNets,
+ powerPlants,
+ res,
+ transformers2w,
+ transformers3w,
+ lines,
+ switches,
+ removeSwitches
+ )
- /**
- * Converts a full simbench grid into power system data models [[JointGridContainer]]. Additionally, individual time
- * series for all system participants are delivered as well.
- *
- * @param simbenchCode Simbench code, that is used as identifier for the grid
- * @param gridInput Total grid input model to be converted
- * @param removeSwitches Whether or not to remove switches from the grid structure
- * @return A converted [[JointGridContainer]], a [[Vector]] of [[IndividualTimeSeries]] as well as a [[Vector]] of [[NodeResult]]s
- */
- def convert(
- simbenchCode: String,
- gridInput: GridModel,
- removeSwitches: Boolean
- ): (
- JointGridContainer,
- Vector[IndividualTimeSeries[_ <: PValue]],
- Seq[MappingEntry],
- Vector[NodeResult]
- ) = {
- logger.debug(s"Converting raw grid elements of '${gridInput.simbenchCode}'")
- val (rawGridElements, nodeConversion) =
- convertGridElements(gridInput, removeSwitches)
-
- logger.debug(
- s"Converting system participants and their time series of '${gridInput.simbenchCode}'"
- )
- val (systemParticipants, timeSeries, timeSeriesMapping) =
- convertParticipants(gridInput, nodeConversion)
+ val convertedLines = convertLines(lines, nodeConversion)
+ val convertedTransformers2w =
+ convertTransformers2w(transformers2w, nodeConversion)
+ val convertedTransformers3w = Vector.empty[Transformer3WInput] /* Currently, no conversion strategy is known */
+ if (transformers3w.nonEmpty)
+ ctx.log.debug(
+ "Creation of three winding transformers is not yet implemented."
+ )
+ val convertedSwitches =
+ if (!removeSwitches) SwitchConverter.convert(switches, nodeConversion)
+ else Vector.empty
+ val convertedMeasurements = MeasurementConverter
+ .convert(measurements, nodeConversion)
+
+ val nonIslandedNodes = filterIsolatedNodes(
+ nodeConversion,
+ convertedLines.toSet.asJava,
+ convertedTransformers2w.toSet.asJava,
+ convertedTransformers3w.toSet.asJava,
+ convertedSwitches.toSet.asJava
+ )
- logger.debug(
- s"Converting power flow results of '${gridInput.simbenchCode}'"
- )
- val powerFlowResults =
- convertNodeResults(gridInput.nodePFResults, nodeConversion)
+ val convertedResults = convertNodeResults(nodeResults, nonIslandedNodes)
- (
- new JointGridContainer(
- simbenchCode,
- rawGridElements,
- systemParticipants,
- new GraphicElements(
- Set.empty[NodeGraphicInput].asJava,
- Set.empty[LineGraphicInput].asJava
- )
- ),
- timeSeries,
- timeSeriesMapping,
- powerFlowResults
- )
+ converter ! Converter.GridStructureConverted(
+ nonIslandedNodes,
+ convertedResults,
+ convertedLines,
+ convertedTransformers2w,
+ convertedTransformers3w,
+ convertedSwitches,
+ convertedMeasurements
+ )
+ Behaviors.same
}
+ sealed trait GridConverterMessage
+ final case class ConvertGridStructure(
+ simBenchCode: String,
+ nodes: Vector[Node],
+ results: Vector[NodePFResult],
+ externalNets: Vector[ExternalNet],
+ powerPlants: Vector[PowerPlant],
+ res: Vector[RES],
+ transformers2w: Vector[Transformer2W],
+ transformers3w: Vector[Transformer3W],
+ lines: Vector[Line[_ <: LineType]],
+ switches: Vector[Switch],
+ measurements: Vector[Measurement],
+ removeSwitches: Boolean = false,
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends GridConverterMessage
+
/**
- * Converts all elements that do form the grid itself.
+   * Convert the nodes with all needed preliminary steps: determining the target subnets, correcting the subnet
+   * number of high voltage switch gear and joining nodes at closed switches.
*
- * @param gridInput Total grid input model to convert
- * @param removeSwitches Whether or not to remove switches from the grid structure
- * @return All grid elements in converted form + a mapping from old to new node models
+ * @param nodes Collection of nodes
+ * @param externalNets Collection of external grids
+ * @param powerPlants Collection of power plants
+ * @param res Collection of renewable energy sources
+ * @param transformers2w Collection of two winding transformers
+ * @param transformers3w Collection of three winding transformers
+ * @param lines Collection of lines
+ * @param switches Collection of switches
+ * @param removeSwitches Whether or not to remove closed switches
+ * @return A mapping from raw node models to their conversion
*/
- def convertGridElements(
- gridInput: GridModel,
- removeSwitches: Boolean
- ): (RawGridElements, Map[Node, NodeInput]) = {
+ private def convertNodes(
+ nodes: Vector[Node],
+ externalNets: Vector[ExternalNet],
+ powerPlants: Vector[PowerPlant],
+ res: Vector[RES],
+ transformers2w: Vector[Transformer2W],
+ transformers3w: Vector[Transformer3W],
+ lines: Vector[Line[_]],
+ switches: Vector[Switch],
+ removeSwitches: Boolean = false
+ ): Map[Node, NodeInput] = {
/* Set up a sub net converter, by crawling all nodes */
val subnetConverter = SubnetConverter(
- gridInput.nodes.map(node => (node.vmR, node.subnet))
+ nodes.map(node => (node.vmR, node.subnet))
)
val slackNodeKeys = NodeConverter.getSlackNodeKeys(
- gridInput.externalNets,
- gridInput.powerPlants,
- gridInput.res
+ externalNets,
+ powerPlants,
+ res
)
/* Collect overriding attributes for node conversion, based on special constellations within the grid */
val subnetOverrides = determineSubnetOverrides(
- gridInput.transformers2w,
- gridInput.transformers3w,
- gridInput.switches,
- gridInput.lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ lines,
subnetConverter
)
val joinOverrides = if (removeSwitches) {
/* If switches are meant to be removed, join all nodes at closed switches */
- determineJoinOverrides(gridInput.switches, slackNodeKeys)
+ determineJoinOverrides(switches, slackNodeKeys)
} else
Vector.empty
- val nodeConversion =
- convertNodes(
- gridInput.nodes,
- slackNodeKeys,
- subnetConverter,
- subnetOverrides,
- joinOverrides
- )
-
- val lines = convertLines(gridInput, nodeConversion).toSet.asJava
- val transformers2w =
- convertTransformers2w(gridInput, nodeConversion).toSet.asJava
- val transformers3w = Set.empty[Transformer3WInput].asJava /* Currently, no conversion strategy is known */
- logger.debug(
- "Creation of three winding transformers is not yet implemented."
- )
- val switches =
- if (!removeSwitches)
- SwitchConverter.convert(gridInput.switches, nodeConversion).toSet.asJava
- else
- Set.empty[SwitchInput].asJava
- val measurements = MeasurementConverter
- .convert(gridInput.measurements, nodeConversion)
- .toSet
- .asJava
-
- val connectedNodes = filterIsolatedNodes(
- nodeConversion,
- lines,
- transformers2w,
- transformers3w,
- switches
- )
-
- (
- new RawGridElements(
- connectedNodes.values.toSet.asJava,
- lines,
- transformers2w,
- transformers3w,
- switches,
- measurements
- ),
- connectedNodes
+ convertNodes(
+ nodes,
+ slackNodeKeys,
+ subnetConverter,
+ subnetOverrides,
+ joinOverrides
)
}
@@ -192,11 +186,11 @@ case object GridConverter extends LazyLogging {
* will control the switches in a manner, that the lower grid needs for. Therefore, for all nodes that are upstream
* of a transformer's hv node and connected via switches, explicit subnet numbers are provided.
*
- * @param transformers2w Collection of two winding transformers
- * @param transformers3w Collection of three winding transformers
- * @param switches Collection of switches
- * @param lines Collection of lines
- * @param subnetConverter Converter to determine subnet numbers
+ * @param transformers2w Collection of two winding transformers
+ * @param transformers3w Collection of three winding transformers
+ * @param switches Collection of switches
+ * @param lines Collection of lines
+ * @param subnetConverter Converter to determine subnet numbers
* @return A collection of [[SubnetOverride]]s
*/
private def determineSubnetOverrides(
@@ -318,8 +312,8 @@ case object GridConverter extends LazyLogging {
/**
* Determine join overrides for all nodes, that are connected by closed switches
*
- * @param switches Collection of all (closed) switches
- * @param slackNodeKeys Collection of node keys, that are foreseen to be slack nodes
+ * @param switches Collection of all (closed) switches
+ * @param slackNodeKeys Collection of node keys, that are foreseen to be slack nodes
* @return A collection of [[JoinOverride]]s
*/
private def determineJoinOverrides(
@@ -364,8 +358,8 @@ case object GridConverter extends LazyLogging {
/**
* Determine overrides per switch group
*
- * @param switchGroup A group of directly connected switches
- * @param slackNodeKeys Collection of node keys, that are foreseen to be slack nodes
+ * @param switchGroup A group of directly connected switches
+ * @param slackNodeKeys Collection of node keys, that are foreseen to be slack nodes
* @return A collection of [[JoinOverride]]s
*/
private def determineJoinOverridesForSwitchGroup(
@@ -469,47 +463,43 @@ case object GridConverter extends LazyLogging {
input: Vector[NodePFResult],
nodeConversion: Map[Node, NodeInput]
): Vector[NodeResult] =
- input.par.map { nodePfResult =>
- val node = nodeConversion.getOrElse(
- nodePfResult.node,
- throw ConversionException(
- s"Cannot convert power flow result for node ${nodePfResult.node}, as the needed node conversion cannot be found."
- )
- )
- NodePFResultConverter.convert(nodePfResult, node)
+ input.par.flatMap { nodePfResult =>
+ nodeConversion
+ .get(nodePfResult.node)
+ .map(node => NodePFResultConverter.convert(nodePfResult, node))
}.seq
/**
* Converts the given lines.
*
- * @param gridInput Total grid input model to convert
+ * @param lines Lines to convert
* @param nodeConversion Already known conversion mapping of nodes
* @return A vector of converted line models
*/
private def convertLines(
- gridInput: GridModel,
+ lines: Vector[Line[_ <: LineType]],
nodeConversion: Map[Node, NodeInput]
): Vector[LineInput] = {
- val lineTypes = LineTypeConverter.convert(gridInput.lines)
- LineConverter.convert(gridInput.lines, lineTypes, nodeConversion)
+ val lineTypes = LineTypeConverter.convert(lines)
+ LineConverter.convert(lines, lineTypes, nodeConversion)
}
/**
* Converts the given two winding transformers.
*
- * @param gridInput Total grid input model to convert
+ * @param transformers Transformer models to convert
* @param nodeConversion Already known conversion mapping of nodes
* @return A vector of converted two winding transformer models
*/
private def convertTransformers2w(
- gridInput: GridModel,
+ transformers: Vector[Transformer2W],
nodeConversion: Map[Node, NodeInput]
): Vector[Transformer2WInput] = {
val transformerTypes = Transformer2wTypeConverter.convert(
- gridInput.transformers2w.map(_.transformerType)
+ transformers.map(_.transformerType)
)
Transformer2wConverter.convert(
- gridInput.transformers2w,
+ transformers,
transformerTypes,
nodeConversion
)
@@ -554,122 +544,4 @@ case object GridConverter extends LazyLogging {
connectedNodes
}
}
-
- /**
- * Converts all system participants and extracts their individual power time series
- *
- * @param gridInput Total grid input model to convert
- * @param nodeConversion Already known conversion mapping of nodes
- * @return A collection of converted system participants and their individual time series
- */
- def convertParticipants(
- gridInput: GridModel,
- nodeConversion: Map[Node, NodeInput]
- ): (
- SystemParticipants,
- Vector[IndividualTimeSeries[_ <: PValue]],
- Seq[MappingEntry]
- ) = {
- /* Convert all participant groups */
- logger.debug(
- s"Participants to convert:\n\tLoads: ${gridInput.loads.size}" +
- s"\n\tPower Plants: ${gridInput.powerPlants.size}\n\tRES: ${gridInput.res.size}"
- )
- val loadsToTimeSeries = convertLoads(gridInput, nodeConversion)
- logger.debug(
- s"Done converting ${gridInput.loads.size} loads including time series"
- )
- val powerPlantsToTimeSeries = convertPowerPlants(gridInput, nodeConversion)
- logger.debug(
- s"Done converting ${gridInput.powerPlants.size} power plants including time series"
- )
- val resToTimeSeries = convertRes(gridInput, nodeConversion)
- logger.debug(
- s"Done converting ${gridInput.res.size} RES including time series"
- )
-
- /* Map participant uuid onto time series */
- val participantsToTimeSeries = loadsToTimeSeries ++ powerPlantsToTimeSeries ++ resToTimeSeries
- val mapping = participantsToTimeSeries.map {
- case (model, timeSeries) =>
- new TimeSeriesMappingSource.MappingEntry(
- UUID.randomUUID(),
- model.getUuid,
- timeSeries.getUuid
- )
- }.toSeq
- val timeSeries: Vector[IndividualTimeSeries[_ >: SValue <: PValue]] =
- participantsToTimeSeries.map(_._2).toVector
-
- (
- new SystemParticipants(
- Set.empty[BmInput].asJava,
- Set.empty[ChpInput].asJava,
- Set.empty[EvcsInput].asJava,
- Set.empty[EvInput].asJava,
- (powerPlantsToTimeSeries.keySet ++ resToTimeSeries.keySet).asJava,
- Set.empty[HpInput].asJava,
- loadsToTimeSeries.keySet.asJava,
- Set.empty[PvInput].asJava,
- Set.empty[StorageInput].asJava,
- Set.empty[WecInput].asJava
- ),
- timeSeries,
- mapping
- )
- }
-
- /**
- * Converting all loads.
- *
- * @param gridInput Total grid input model to convert
- * @param nodeConversion Already known conversion mapping of nodes
- * @return A mapping from loads to their assigned, specific time series
- */
- def convertLoads(
- gridInput: GridModel,
- nodeConversion: Map[Node, NodeInput]
- ): Map[LoadInput, IndividualTimeSeries[SValue]] = {
- val loadProfiles = gridInput.loadProfiles
- .map(profile => profile.profileType -> profile)
- .toMap
- LoadConverter.convert(gridInput.loads, nodeConversion, loadProfiles)
- }
-
- /**
- * Converting all power plants.
- *
- * @param gridInput Total grid input model to convert
- * @param nodeConversion Already known conversion mapping of nodes
- * @return A mapping from power plants to their assigned, specific time series
- */
- def convertPowerPlants(
- gridInput: GridModel,
- nodeConversion: Map[Node, NodeInput]
- ): Map[FixedFeedInInput, IndividualTimeSeries[PValue]] = {
- val powerPlantProfiles = gridInput.powerPlantProfiles
- .map(profile => profile.profileType -> profile)
- .toMap
- PowerPlantConverter.convert(
- gridInput.powerPlants,
- nodeConversion,
- powerPlantProfiles
- )
- }
-
- /**
- * Converting all renewable energy source system.
- *
- * @param gridInput Total grid input model to convert
- * @param nodeConversion Already known conversion mapping of nodes
- * @return A mapping from renewable energy source system to their assigned, specific time series
- */
- def convertRes(
- gridInput: GridModel,
- nodeConversion: Map[Node, NodeInput]
- ): Map[FixedFeedInInput, IndividualTimeSeries[PValue]] = {
- val resProfiles =
- gridInput.resProfiles.map(profile => profile.profileType -> profile).toMap
- ResConverter.convert(gridInput.res, nodeConversion, resProfiles)
- }
}
diff --git a/src/main/scala/edu/ie3/simbench/actor/LoadConverter.scala b/src/main/scala/edu/ie3/simbench/actor/LoadConverter.scala
new file mode 100644
index 00000000..9859bb38
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/LoadConverter.scala
@@ -0,0 +1,233 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.{ActorRef, SupervisorStrategy}
+import akka.actor.typed.scaladsl.{Behaviors, Routers}
+import edu.ie3.datamodel.models.OperationTime
+import edu.ie3.datamodel.models.StandardLoadProfile.DefaultLoadProfiles
+import edu.ie3.datamodel.models.input.system.LoadInput
+import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
+import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
+import edu.ie3.simbench.convert.profiles.PowerProfileConverter
+import edu.ie3.simbench.convert.{NodeConverter, ShuntConverter}
+import edu.ie3.simbench.model.datamodel.profiles.{LoadProfile, LoadProfileType}
+import edu.ie3.simbench.model.datamodel.{Load, Node}
+import edu.ie3.util.quantities.PowerSystemUnits.{
+ KILOWATTHOUR,
+ MEGAVAR,
+ MEGAVOLTAMPERE,
+ MEGAWATT
+}
+import tech.units.indriya.quantity.Quantities
+
+import java.util.{Locale, UUID}
+
+case object LoadConverter
+ extends ShuntConverter
+ with ShuntConverterMessageSupport[Load, LoadProfile, LoadInput] {
+ def apply(): Behaviors.Receive[ShuntConverterMessage] = uninitialized
+
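+  /**
+    * Await the initialization message, set up a pool of workers and report readiness to the
+    * [[Converter]]
+    */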
+ def uninitialized: Behaviors.Receive[ShuntConverterMessage] =
+ Behaviors.receive {
+ case (
+ ctx,
+ Init(simBenchCode, amountOfWorkers, profiles, mutator, converter)
+ ) =>
+ /* Prepare information */
+ val typeToProfile =
+ profiles.map(profile => profile.profileType -> profile).toMap
+
+ /* Set up a worker pool */
+ val workerPool = Routers
+ .pool(poolSize = amountOfWorkers) {
+ Behaviors
+ .supervise(LoadConverter.Worker())
+ .onFailure(SupervisorStrategy.restart)
+ }
+ /* Allow broadcast messages to init all workers */
+ .withBroadcastPredicate {
+ case _: WorkerMessage.Init => true
+ case _ => false
+ }
+ .withRoundRobinRouting()
+ val workerPoolProxy =
+ ctx.spawn(
+ workerPool,
+ s"LoadConverterWorkerPool_$simBenchCode"
+ )
+
+ workerPoolProxy ! WorkerMessage.Init(mutator)
+ converter ! Converter.LoadConverterReady(ctx.self)
+
+ idle(typeToProfile, workerPoolProxy)
+ }
+
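+  /**
+    * Await a request to convert a set of loads and distribute the work among the worker pool
+    */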
+ def idle(
+ typeToProfile: Map[LoadProfileType, LoadProfile],
+ workerPool: ActorRef[WorkerMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+ case (ctx, Convert(simBenchCode, input, nodes, converter)) =>
+ ctx.log.debug(s"Got request to convert loads from '$simBenchCode'.")
+      val activeConversions = input.map { load =>
+        val node = NodeConverter.getNode(load.node, nodes)
+        val profile =
+          PowerProfileConverter.getProfile(load.profile, typeToProfile)
+
+        workerPool ! Worker.Convert(load, node, profile, ctx.self)
+        (load.id, load.node.getKey)
+ }
+ converting(activeConversions, Map.empty, workerPool, converter)
+ }
+
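+  /**
+    * Collect the worker responses and report back to the [[Converter]], once all loads are
+    * converted
+    */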
+ def converting(
+ activeConversions: Vector[(String, Node.NodeKey)],
+ converted: Map[LoadInput, UUID],
+ workerPool: ActorRef[WorkerMessage],
+ converter: ActorRef[Converter.ConverterMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+    case (ctx, Converted(id, node, model, timeSeriesUuid)) =>
+      val remainingConversions = activeConversions.filterNot(_ == (id, node))
+      val updatedConverted = converted + (model -> timeSeriesUuid)
+ ctx.log.debug(
+ s"Model '$id' at node '$node' is converted. ${remainingConversions.size} active conversions remaining."
+ )
+ /* Stop the children and myself, if all conversions are done. */
+      if (remainingConversions.isEmpty) {
+        ctx.stop(workerPool)
+        converter ! Converter.LoadsConverted(updatedConverted)
+        Behaviors.stopped
+      } else
+        converting(remainingConversions, updatedConverted, workerPool, converter)
+  }
+
+ final case class Init(
+ simBenchCode: String,
+ amountOfWorkers: Int,
+ profiles: Vector[LoadProfile],
+ mutator: ActorRef[Mutator.MutatorMessage],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Init
+
+ /**
+ * Request to convert all given models
+ */
+ final case class Convert(
+ simBenchCode: String,
+ inputs: Vector[Load],
+ nodes: Map[Node, NodeInput],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Convert
+
+ final case class Converted(
+ id: String,
+ node: Node.NodeKey,
+ model: LoadInput,
+ timeSeriesUuid: UUID
+ ) extends super.Converted
+
+ object Worker {
+ def apply(): Behaviors.Receive[WorkerMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (_, WorkerMessage.Init(mutator)) =>
+ idle(mutator)
+ }
+
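+    /**
+      * Convert single loads, have their time series persisted by the [[Mutator]] and report
+      * back, once the persistence is confirmed
+      */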
+ def idle(
+ mutator: ActorRef[Mutator.MutatorMessage],
+ awaitTimeSeriesPersistence: Map[
+ UUID,
+ (
+ String,
+ Node.NodeKey,
+ LoadInput,
+ ActorRef[ShuntConverterMessage]
+ )
+ ] = Map.empty
+ ): Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (ctx, Convert(input, node, profile, replyTo)) =>
+        ctx.log.debug(
+          s"Got request to convert load '${input.id}' at node '${input.node.getKey} / ${node.getUuid}'."
+        )
+
+ val model = convertModel(input, node)
+
+ val timeSeries = PowerProfileConverter.convert(
+ profile,
+ Quantities.getQuantity(input.pLoad, MEGAWATT),
+ Quantities.getQuantity(input.qLoad, MEGAVAR)
+ )
+
+ mutator ! Mutator.PersistTimeSeries(timeSeries, ctx.self)
+ val updatedAwaitedTimeSeriesPersistence = awaitTimeSeriesPersistence + (timeSeries.getUuid -> (input.id, input.node.getKey, model, replyTo))
+ idle(mutator, updatedAwaitedTimeSeriesPersistence)
+
+ case (ctx, WorkerMessage.TimeSeriesPersisted(uuid)) =>
+ ctx.log.debug(s"Time series '$uuid' is fully persisted.")
+ awaitTimeSeriesPersistence.get(uuid) match {
+ case Some((id, nodeKey, model, replyTo)) =>
+ val remainingPersistence =
+ awaitTimeSeriesPersistence.filterNot(_._1 == uuid)
+
+ replyTo ! LoadConverter.Converted(id, nodeKey, model, uuid)
+
+ idle(mutator, remainingPersistence)
+ case None =>
+ ctx.log.warn(
+            s"Got informed that the time series with uuid '$uuid' is persisted, although I didn't expect that to happen."
+ )
+ Behaviors.same
+ }
+ }
+
+ /**
+     * Overrides the abstract Convert message with parameters that suit this converter.
+ *
+ * @param model Model itself
+ * @param node Node, the converted model will be connected to
+     * @param profile The profile that belongs to the model
+ * @param replyTo Address to reply to
+ */
+ final case class Convert(
+ override val model: Load,
+ override val node: NodeInput,
+ override val profile: LoadProfile,
+ override val replyTo: ActorRef[ShuntConverterMessage]
+ ) extends WorkerMessage.Convert[Load, LoadProfile, ShuntConverterMessage]
+
+ /**
+     * Converts a load. Different voltage regulation strategies are not covered yet.
+ *
+ * @param input Input model
+     * @param node  Node, the load is connected to
+ * @param uuid Option to a specific uuid
+ * @return A [[LoadInput]]
+ */
+ def convertModel(
+ input: Load,
+ node: NodeInput,
+ uuid: Option[UUID] = None
+ ): LoadInput = {
+ val id = input.id
+ val cosphi = cosPhi(input.pLoad, input.qLoad)
+ val varCharacteristicString =
+ "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
+ val eCons = Quantities.getQuantity(0d, KILOWATTHOUR)
+ val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
+
+ new LoadInput(
+ uuid.getOrElse(UUID.randomUUID()),
+ id,
+ OperatorInput.NO_OPERATOR_ASSIGNED,
+ OperationTime.notLimited(),
+ node,
+ new CosPhiFixed(varCharacteristicString),
+ DefaultLoadProfiles.NO_STANDARD_LOAD_PROFILE,
+ false,
+ eCons,
+ sRated,
+ cosphi
+ )
+ }
+ }
+}
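
The shunt converters in this patch (loads, power plants, RES) all follow the same two-phase protocol: an `Init` that spawns a supervised worker pool and is answered with a `...Ready` message, followed by a `Convert` request once the parent Converter has gathered its inputs. A minimal sketch of the first phase for the LoadConverter, using the Akka testkit that the new specs below also rely on; the object and probe names are illustrative and not part of the patch:

```scala
import akka.actor.testkit.typed.scaladsl.ActorTestKit
import edu.ie3.simbench.actor.{Converter, LoadConverter, Mutator}

object LoadConverterInitSketch extends App {
  val testKit = ActorTestKit()
  // Probes stand in for the real Mutator and the parent Converter
  val mutatorProbe = testKit.createTestProbe[Mutator.MutatorMessage]()
  val converterProbe = testKit.createTestProbe[Converter.ConverterMessage]()

  val loadConverter = testKit.spawn(LoadConverter(), "loadConverter")
  loadConverter ! LoadConverter.Init(
    simBenchCode = "1-LV-rural1--0-no_sw",
    amountOfWorkers = 20,
    profiles = Vector.empty, // real load profiles would be passed here
    mutator = mutatorProbe.ref,
    replyTo = converterProbe.ref
  )

  // The worker pool is spawned, WorkerMessage.Init is broadcast to all workers
  // and the parent Converter is told that load conversion may start.
  converterProbe.expectMessageType[Converter.LoadConverterReady]
  testKit.shutdownTestKit()
}
```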
diff --git a/src/main/scala/edu/ie3/simbench/actor/Mutator.scala b/src/main/scala/edu/ie3/simbench/actor/Mutator.scala
new file mode 100644
index 00000000..7bae952b
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/Mutator.scala
@@ -0,0 +1,253 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.ActorRef
+import akka.actor.typed.scaladsl.Behaviors
+import edu.ie3.datamodel.io.naming.{
+ DefaultDirectoryHierarchy,
+ EntityPersistenceNamingStrategy,
+ FileNamingStrategy
+}
+import edu.ie3.datamodel.io.sink.CsvFileSink
+import edu.ie3.datamodel.io.source.TimeSeriesMappingSource.MappingEntry
+import edu.ie3.datamodel.models.input.{MeasurementUnitInput, NodeInput}
+import edu.ie3.datamodel.models.input.connector.{
+ LineInput,
+ SwitchInput,
+ Transformer2WInput,
+ Transformer3WInput
+}
+import edu.ie3.datamodel.models.input.container.{
+ GraphicElements,
+ JointGridContainer,
+ RawGridElements,
+ SystemParticipants
+}
+import edu.ie3.datamodel.models.input.system.{
+ BmInput,
+ ChpInput,
+ EvInput,
+ EvcsInput,
+ FixedFeedInInput,
+ HpInput,
+ LoadInput,
+ PvInput,
+ StorageInput,
+ WecInput
+}
+import edu.ie3.datamodel.models.result.NodeResult
+import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
+import edu.ie3.simbench.io.IoUtils
+import edu.ie3.util.io.FileIOUtils
+import org.apache.commons.io.FilenameUtils
+
+import java.nio.file.Paths
+import java.util.UUID
+import scala.concurrent.Await
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration.Duration
+import scala.jdk.FutureConverters.CompletionStageOps
+import scala.util.{Failure, Success}
+import scala.jdk.CollectionConverters._
+
+object Mutator {
+ def apply(): Behaviors.Receive[MutatorMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[MutatorMessage] = Behaviors.receive {
+ case (
+ ctx,
+ Init(
+ simBenchCode,
+ useDirectoryHierarchy,
+ targetDirectory,
+ csvColumnSeparator,
+ compress,
+ replyTo
+ )
+ ) =>
+ ctx.log.debug(
+ s"Initializing a mutator for SimBench code '$simBenchCode'."
+ )
+
+ val baseTargetDirectory =
+ IoUtils.ensureHarmonizedAndTerminatingFileSeparator(targetDirectory)
+
+ /* Starting up a csv file sink */
+ val csvSink = if (useDirectoryHierarchy) {
+ new CsvFileSink(
+ baseTargetDirectory,
+ new FileNamingStrategy(
+ new EntityPersistenceNamingStrategy(),
+ new DefaultDirectoryHierarchy(baseTargetDirectory, simBenchCode)
+ ),
+ false,
+ csvColumnSeparator
+ )
+ } else {
+ new CsvFileSink(
+ baseTargetDirectory + simBenchCode,
+ new FileNamingStrategy(),
+ false,
+ csvColumnSeparator
+ )
+ }
+
+ replyTo ! Converter.MutatorInitialized(ctx.self)
+ idle(simBenchCode, csvSink, baseTargetDirectory, compress)
+ }
+
+ def idle(
+ simBenchCode: String,
+ sink: CsvFileSink,
+ targetDirectory: String,
+ compress: Boolean
+ ): Behaviors.Receive[MutatorMessage] =
+ Behaviors.receive {
+ case (ctx, PersistTimeSeries(timeSeries, replyTo)) =>
+ ctx.log.debug(
+ s"Got request to persist time series '${timeSeries.getUuid}'."
+ )
+ sink.persistTimeSeries(timeSeries)
+ replyTo ! WorkerMessage.TimeSeriesPersisted(timeSeries.getUuid)
+ Behaviors.same
+
+ case (
+ ctx,
+ PersistGridStructure(
+ simBenchCode,
+ nodes,
+ lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ measurements,
+ loads,
+ fixedFeedIns,
+ replyTo
+ )
+ ) =>
+ ctx.log.debug("Got request to persist grid structure.")
+
+ val container = new JointGridContainer(
+ simBenchCode,
+ new RawGridElements(
+ nodes.asJava,
+ lines.asJava,
+ transformers2w.asJava,
+ transformers3w.asJava,
+ switches.asJava,
+ measurements.asJava
+ ),
+ new SystemParticipants(
+ Set.empty[BmInput].asJava,
+ Set.empty[ChpInput].asJava,
+ Set.empty[EvcsInput].asJava,
+ Set.empty[EvInput].asJava,
+ fixedFeedIns.asJava,
+ Set.empty[HpInput].asJava,
+ loads.asJava,
+ Set.empty[PvInput].asJava,
+ Set.empty[StorageInput].asJava,
+ Set.empty[WecInput].asJava
+ ),
+ new GraphicElements(
+ Set.empty[GraphicElements].asJava
+ )
+ )
+
+ sink.persistJointGrid(container)
+
+ replyTo ! Converter.GridStructurePersisted
+ Behaviors.same
+
+ case (ctx, PersistNodalResults(results, replyTo)) =>
+ ctx.log.debug("Got request to persist nodal results.")
+
+ sink.persistAll(results.asJava)
+ replyTo ! Converter.NodalResultsPersisted
+ Behaviors.same
+
+ case (ctx, PersistTimeSeriesMapping(mapping, replyTo)) =>
+ ctx.log.debug("Got request to persist time series mapping")
+
+ sink.persistAllIgnoreNested(
+ mapping
+ .map {
+ case (modelUuid, timeSeriesUuid) =>
+ new MappingEntry(UUID.randomUUID(), modelUuid, timeSeriesUuid)
+ }
+ .toList
+ .asJava
+ )
+ replyTo ! Converter.TimeSeriesMappingPersisted
+ Behaviors.same
+
+ case (ctx, Terminate) =>
+ ctx.log.debug("Got termination request")
+
+ if (compress) {
+ ctx.log.debug("Compressing output")
+ val rawOutputPath = Paths.get(targetDirectory + simBenchCode)
+ val archivePath = Paths.get(
+ FilenameUtils.concat(targetDirectory, simBenchCode + ".tar.gz")
+ )
+ val compressFuture =
+ FileIOUtils.compressDir(rawOutputPath, archivePath).asScala
+ compressFuture.onComplete {
+ case Success(_) =>
+ FileIOUtils.deleteRecursively(rawOutputPath)
+ case Failure(exception) =>
+ ctx.log.error(
+ s"Compression of output files to '$archivePath' has failed. Keep raw data.",
+ exception
+ )
+ }
+
+ Await.ready(compressFuture, Duration("180s"))
+ }
+
+ sink.shutdown()
+
+ Behaviors.stopped
+ }
+
+  /** Messages a mutator will understand */
+ sealed trait MutatorMessage
+ final case class Init(
+ simBenchCode: String,
+ useDirectoryHierarchy: Boolean,
+ targetDirectory: String,
+ csvColumnSeparator: String,
+ compress: Boolean,
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends MutatorMessage
+
+ final case class PersistGridStructure(
+ simBenchCode: String,
+ nodes: Set[NodeInput],
+ lines: Set[LineInput],
+ transformers2w: Set[Transformer2WInput],
+ transformers3w: Set[Transformer3WInput],
+ switches: Set[SwitchInput],
+ measurements: Set[MeasurementUnitInput],
+ loads: Set[LoadInput],
+ fixedFeedIns: Set[FixedFeedInInput],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends MutatorMessage
+
+ final case class PersistTimeSeriesMapping(
+ mapping: Map[UUID, UUID],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends MutatorMessage
+
+ final case class PersistNodalResults(
+ results: Set[NodeResult],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends MutatorMessage
+
+ final case class PersistTimeSeries(
+ timeSeries: IndividualTimeSeries[_],
+ replyTo: ActorRef[WorkerMessage]
+ ) extends MutatorMessage
+
+ object Terminate extends MutatorMessage
+}
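
For reference, this is the lifecycle a converter is expected to drive on the Mutator above: initialize it, wait for the `MutatorInitialized` acknowledgement, let the workers address their `PersistTimeSeries` requests to its ref, and finally send `Terminate`. A small sketch using the testkit probes from the new specs below; the names and the target directory are illustrative, and note that `Init` already opens a `CsvFileSink` underneath the given directory:

```scala
import akka.actor.testkit.typed.scaladsl.ActorTestKit
import edu.ie3.simbench.actor.{Converter, Mutator}

object MutatorLifecycleSketch extends App {
  val testKit = ActorTestKit()
  val converterProbe = testKit.createTestProbe[Converter.ConverterMessage]()

  val mutator = testKit.spawn(Mutator(), "mutator")
  mutator ! Mutator.Init(
    simBenchCode = "1-LV-rural1--0-no_sw",
    useDirectoryHierarchy = false,
    targetDirectory = "convertedData", // illustrative output folder
    csvColumnSeparator = ";",
    compress = false,
    replyTo = converterProbe.ref
  )
  converterProbe.expectMessageType[Converter.MutatorInitialized]

  // Workers would now send Mutator.PersistTimeSeries(timeSeries, ctx.self) and
  // receive WorkerMessage.TimeSeriesPersisted(uuid) back. Once everything is
  // persisted, the converter shuts the sink down:
  mutator ! Mutator.Terminate
  testKit.shutdownTestKit()
}
```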
diff --git a/src/main/scala/edu/ie3/simbench/actor/PowerPlantConverter.scala b/src/main/scala/edu/ie3/simbench/actor/PowerPlantConverter.scala
new file mode 100644
index 00000000..b4bc2cd8
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/PowerPlantConverter.scala
@@ -0,0 +1,243 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.{ActorRef, SupervisorStrategy}
+import akka.actor.typed.scaladsl.{Behaviors, Routers}
+import edu.ie3.datamodel.models.OperationTime
+import edu.ie3.datamodel.models.input.system.FixedFeedInInput
+import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
+import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
+import edu.ie3.simbench.convert.profiles.PowerProfileConverter
+import edu.ie3.simbench.convert.{NodeConverter, ShuntConverter}
+import edu.ie3.simbench.model.datamodel.profiles.{
+ PowerPlantProfile,
+ PowerPlantProfileType
+}
+import edu.ie3.simbench.model.datamodel.{Node, PowerPlant}
+import edu.ie3.util.quantities.PowerSystemUnits.{
+ MEGAVAR,
+ MEGAVOLTAMPERE,
+ MEGAWATT
+}
+import tech.units.indriya.quantity.Quantities
+
+import java.util.{Locale, UUID}
+
+case object PowerPlantConverter
+ extends ShuntConverter
+ with ShuntConverterMessageSupport[
+ PowerPlant,
+ PowerPlantProfile,
+ FixedFeedInInput
+ ] {
+ def apply(): Behaviors.Receive[ShuntConverterMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[ShuntConverterMessage] =
+ Behaviors.receive {
+ case (
+ ctx,
+ Init(simBenchCode, amountOfWorkers, profiles, mutator, converter)
+ ) =>
+ /* Prepare information */
+ val typeToProfile =
+ profiles.map(profile => profile.profileType -> profile).toMap
+
+ /* Set up a worker pool */
+ val workerPool = Routers
+ .pool(poolSize = amountOfWorkers) {
+ Behaviors
+ .supervise(PowerPlantConverter.Worker())
+ .onFailure(SupervisorStrategy.restart)
+ }
+ /* Allow broadcast messages to init all workers */
+ .withBroadcastPredicate {
+ case _: WorkerMessage.Init => true
+ case _ => false
+ }
+ .withRoundRobinRouting()
+ val workerPoolProxy =
+ ctx.spawn(
+ workerPool,
+ s"PowerPlantConverterWorkerPool_$simBenchCode"
+ )
+
+ workerPoolProxy ! WorkerMessage.Init(mutator)
+ converter ! Converter.PowerPlantConverterReady(ctx.self)
+
+ idle(typeToProfile, workerPoolProxy)
+ }
+
+ def idle(
+ typeToProfile: Map[PowerPlantProfileType, PowerPlantProfile],
+ workerPool: ActorRef[WorkerMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+ case (ctx, Convert(simBenchCode, input, nodes, converter)) =>
+ ctx.log.debug(
+ s"Got request to convert power plants from '$simBenchCode'."
+ )
+ val activeConversions = input.map { plant =>
+ val node = NodeConverter.getNode(plant.node, nodes)
+ val profile =
+ PowerProfileConverter.getProfile(plant.profile, typeToProfile)
+
+ workerPool ! Worker.Convert(plant, node, profile, ctx.self)
+ (plant.id, plant.node.getKey)
+ }
+ converting(activeConversions, Map.empty, workerPool, converter)
+ }
+
+ def converting(
+ activeConversions: Vector[(String, Node.NodeKey)],
+ converted: Map[FixedFeedInInput, UUID],
+ workerPool: ActorRef[WorkerMessage],
+ converter: ActorRef[Converter.ConverterMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+ case (ctx, Converted(id, node, fixedFeedInInput, timeSeriesUuid)) =>
+ val remainingConversions = activeConversions.filterNot(_ == (id, node))
+ val updatedConverted = converted + (fixedFeedInInput -> timeSeriesUuid)
+ ctx.log.debug(
+ s"Model '$id' at node '$node' is converted. ${remainingConversions.size} active conversions remaining."
+ )
+      /* Stop the children and myself if all conversions are done. */
+      if (remainingConversions.isEmpty) {
+        ctx.stop(workerPool)
+        converter ! Converter.PowerPlantsConverted(updatedConverted)
+        Behaviors.stopped
+      } else
+        converting(remainingConversions, updatedConverted, workerPool, converter)
+ }
+
+ final case class Init(
+ simBenchCode: String,
+ amountOfWorkers: Int,
+ profiles: Vector[PowerPlantProfile],
+ mutator: ActorRef[Mutator.MutatorMessage],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Init
+
+ /**
+ * Request to convert all given models
+ */
+ final case class Convert(
+ simBenchCode: String,
+ inputs: Vector[PowerPlant],
+ nodes: Map[Node, NodeInput],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Convert
+
+ final case class Converted(
+ id: String,
+ node: Node.NodeKey,
+ model: FixedFeedInInput,
+ timeSeriesUuid: UUID
+ ) extends super.Converted
+
+ object Worker {
+ def apply(): Behaviors.Receive[WorkerMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (_, WorkerMessage.Init(mutator)) =>
+ idle(mutator)
+ }
+
+ def idle(
+ mutator: ActorRef[Mutator.MutatorMessage],
+ awaitTimeSeriesPersistence: Map[
+ UUID,
+ (
+ String,
+ Node.NodeKey,
+ FixedFeedInInput,
+ ActorRef[ShuntConverterMessage]
+ )
+ ] = Map.empty
+ ): Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (ctx, Convert(input, node, profile, replyTo)) =>
+        ctx.log.debug(
+          s"Got request to convert power plant '${input.id}' at node '${input.node.getKey} / ${node.getUuid}'."
+        )
+
+ val model = convertModel(input, node)
+
+ val timeSeries = PowerProfileConverter.convert(
+ profile,
+ Quantities.getQuantity(input.p, MEGAWATT).multiply(-1)
+ )
+
+ mutator ! Mutator.PersistTimeSeries(timeSeries, ctx.self)
+ val updatedAwaitedTimeSeriesPersistence = awaitTimeSeriesPersistence + (timeSeries.getUuid -> (input.id, input.node.getKey, model, replyTo))
+ idle(mutator, updatedAwaitedTimeSeriesPersistence)
+
+ case (ctx, WorkerMessage.TimeSeriesPersisted(uuid)) =>
+ ctx.log.debug(s"Time series '$uuid' is fully persisted.")
+ awaitTimeSeriesPersistence.get(uuid) match {
+ case Some((id, nodeKey, model, replyTo)) =>
+ val remainingPersistence =
+ awaitTimeSeriesPersistence.filterNot(_._1 == uuid)
+
+ replyTo ! PowerPlantConverter.Converted(id, nodeKey, model, uuid)
+
+ idle(mutator, remainingPersistence)
+ case None =>
+ ctx.log.warn(
+            s"Got informed that the time series with uuid '$uuid' is persisted, although I didn't expect that to happen."
+ )
+ Behaviors.same
+ }
+ }
+
+ /**
+     * Overrides the abstract Convert message with parameters that suit this converter.
+ *
+ * @param model Model itself
+ * @param node Node, the converted model will be connected to
+     * @param profile The profile that belongs to the model
+ * @param replyTo Address to reply to
+ */
+ final case class Convert(
+ override val model: PowerPlant,
+ override val node: NodeInput,
+ override val profile: PowerPlantProfile,
+ override val replyTo: ActorRef[ShuntConverterMessage]
+ ) extends WorkerMessage.Convert[
+ PowerPlant,
+ PowerPlantProfile,
+ ShuntConverterMessage
+ ]
+
+ /**
+     * Converts a single power plant to a fixed feed-in model, as there is too little information to sophisticatedly
+     * guess typical asset types. Different voltage regulation strategies are not covered yet.
+ *
+ * @param input Input model
+     * @param node  Node, the power plant is connected to
+ * @param uuid Option to a specific uuid
+ * @return A [[FixedFeedInInput]]
+ */
+ def convertModel(
+ input: PowerPlant,
+ node: NodeInput,
+ uuid: Option[UUID] = None
+ ): FixedFeedInInput = {
+ val p = Quantities.getQuantity(input.p, MEGAWATT)
+ val q = input.q match {
+ case Some(value) => Quantities.getQuantity(value, MEGAVAR)
+ case None => Quantities.getQuantity(0d, MEGAVAR)
+ }
+ val cosphi = cosPhi(p.getValue.doubleValue(), q.getValue.doubleValue())
+ val varCharacteristicString =
+ "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
+ val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
+
+ new FixedFeedInInput(
+ uuid.getOrElse(UUID.randomUUID()),
+ input.id,
+ OperatorInput.NO_OPERATOR_ASSIGNED,
+ OperationTime.notLimited(),
+ node,
+ new CosPhiFixed(varCharacteristicString),
+ sRated,
+ cosphi
+ )
+ }
+ }
+}
diff --git a/src/main/scala/edu/ie3/simbench/actor/ResConverter.scala b/src/main/scala/edu/ie3/simbench/actor/ResConverter.scala
new file mode 100644
index 00000000..686862df
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/ResConverter.scala
@@ -0,0 +1,227 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.{ActorRef, SupervisorStrategy}
+import akka.actor.typed.scaladsl.{Behaviors, Routers}
+import edu.ie3.datamodel.models.OperationTime
+import edu.ie3.datamodel.models.input.system.FixedFeedInInput
+import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
+import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
+import edu.ie3.simbench.convert.profiles.PowerProfileConverter
+import edu.ie3.simbench.convert.{NodeConverter, ShuntConverter}
+import edu.ie3.simbench.model.datamodel.profiles.{ResProfile, ResProfileType}
+import edu.ie3.simbench.model.datamodel.{Node, RES}
+import edu.ie3.util.quantities.PowerSystemUnits.{
+ MEGAVAR,
+ MEGAVOLTAMPERE,
+ MEGAWATT
+}
+import tech.units.indriya.quantity.Quantities
+
+import java.util.{Locale, UUID}
+
+case object ResConverter
+ extends ShuntConverter
+ with ShuntConverterMessageSupport[RES, ResProfile, FixedFeedInInput] {
+ def apply(): Behaviors.Receive[ShuntConverterMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[ShuntConverterMessage] =
+ Behaviors.receive {
+ case (
+ ctx,
+ Init(simBenchCode, amountOfWorkers, profiles, mutator, converter)
+ ) =>
+ /* Prepare information */
+ val typeToProfile =
+ profiles.map(profile => profile.profileType -> profile).toMap
+
+ /* Set up a worker pool */
+ val workerPool = Routers
+ .pool(poolSize = amountOfWorkers) {
+ Behaviors
+ .supervise(ResConverter.Worker())
+ .onFailure(SupervisorStrategy.restart)
+ }
+ /* Allow broadcast messages to init all workers */
+ .withBroadcastPredicate {
+ case _: WorkerMessage.Init => true
+ case _ => false
+ }
+ .withRoundRobinRouting()
+ val workerPoolProxy =
+ ctx.spawn(
+ workerPool,
+ s"ResConverterWorkerPool_$simBenchCode"
+ )
+
+ workerPoolProxy ! WorkerMessage.Init(mutator)
+ converter ! Converter.ResConverterReady(ctx.self)
+
+ idle(typeToProfile, workerPoolProxy)
+ }
+
+ def idle(
+ typeToProfile: Map[ResProfileType, ResProfile],
+ workerPool: ActorRef[WorkerMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+ case (ctx, Convert(simBenchCode, res, nodes, converter)) =>
+ ctx.log.debug(s"Got request to convert res from '$simBenchCode'.")
+ val activeConversions = res.map { plant =>
+ val node = NodeConverter.getNode(plant.node, nodes)
+ val profile =
+ PowerProfileConverter.getProfile(plant.profile, typeToProfile)
+
+ workerPool ! Worker.Convert(plant, node, profile, ctx.self)
+ (plant.id, plant.node.getKey)
+ }
+ converting(activeConversions, Map.empty, workerPool, converter)
+ }
+
+ def converting(
+ activeConversions: Vector[(String, Node.NodeKey)],
+ converted: Map[FixedFeedInInput, UUID],
+ workerPool: ActorRef[WorkerMessage],
+ converter: ActorRef[Converter.ConverterMessage]
+ ): Behaviors.Receive[ShuntConverterMessage] = Behaviors.receive {
+ case (ctx, Converted(id, node, fixedFeedInInput, timeSeriesUuid)) =>
+ val remainingConversions = activeConversions.filterNot(_ == (id, node))
+ val updatedConverted = converted + (fixedFeedInInput -> timeSeriesUuid)
+ ctx.log.debug(
+ s"Model '$id' at node '$node' is converted. ${remainingConversions.size} active conversions remaining."
+ )
+      /* Stop the children and myself if all conversions are done. */
+      if (remainingConversions.isEmpty) {
+        ctx.stop(workerPool)
+        converter ! Converter.ResConverted(updatedConverted)
+        Behaviors.stopped
+      } else
+        converting(remainingConversions, updatedConverted, workerPool, converter)
+ }
+
+ final case class Init(
+ simBenchCode: String,
+ amountOfWorkers: Int,
+ profiles: Vector[ResProfile],
+ mutator: ActorRef[Mutator.MutatorMessage],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Init
+
+ /**
+ * Request to convert all given models
+ */
+ final case class Convert(
+ simBenchCode: String,
+ inputs: Vector[RES],
+ nodes: Map[Node, NodeInput],
+ replyTo: ActorRef[Converter.ConverterMessage]
+ ) extends super.Convert
+
+ final case class Converted(
+ id: String,
+ node: Node.NodeKey,
+ model: FixedFeedInInput,
+ timeSeriesUuid: UUID
+ ) extends super.Converted
+
+ object Worker {
+ def apply(): Behaviors.Receive[WorkerMessage] = uninitialized
+
+ def uninitialized: Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (_, WorkerMessage.Init(mutator)) =>
+ idle(mutator)
+ }
+
+ def idle(
+ mutator: ActorRef[Mutator.MutatorMessage],
+ awaitTimeSeriesPersistence: Map[
+ UUID,
+ (
+ String,
+ Node.NodeKey,
+ FixedFeedInInput,
+ ActorRef[ShuntConverterMessage]
+ )
+ ] = Map.empty
+ ): Behaviors.Receive[WorkerMessage] = Behaviors.receive {
+ case (ctx, Convert(res, node, profile, replyTo)) =>
+        ctx.log.debug(
+          s"Got request to convert RES '${res.id}' at node '${res.node.getKey} / ${node.getUuid}'."
+        )
+
+ val model = convertModel(res, node)
+ /* Flip the sign, as infeed is negative in PowerSystemDataModel */
+ val timeSeries = PowerProfileConverter.convert(
+ profile,
+ Quantities.getQuantity(res.p, MEGAWATT).multiply(-1)
+ )
+
+ mutator ! Mutator.PersistTimeSeries(timeSeries, ctx.self)
+ val updatedAwaitedTimeSeriesPersistence = awaitTimeSeriesPersistence + (timeSeries.getUuid -> (res.id, res.node.getKey, model, replyTo))
+ idle(mutator, updatedAwaitedTimeSeriesPersistence)
+
+ case (ctx, WorkerMessage.TimeSeriesPersisted(uuid)) =>
+ ctx.log.debug(s"Time series '$uuid' is fully persisted.")
+ awaitTimeSeriesPersistence.get(uuid) match {
+ case Some((id, nodeKey, model, replyTo)) =>
+ val remainingPersistence =
+ awaitTimeSeriesPersistence.filterNot(_._1 == uuid)
+
+ replyTo ! ResConverter.Converted(id, nodeKey, model, uuid)
+
+ idle(mutator, remainingPersistence)
+ case None =>
+ ctx.log.warn(
+            s"Got informed that the time series with uuid '$uuid' is persisted, although I didn't expect that to happen."
+ )
+ Behaviors.same
+ }
+ }
+
+ /**
+     * Overrides the abstract Convert message with parameters that suit this converter.
+ *
+ * @param model Model itself
+ * @param node Node, the converted model will be connected to
+     * @param profile The profile that belongs to the model
+ * @param replyTo Address to reply to
+ */
+ final case class Convert(
+ override val model: RES,
+ override val node: NodeInput,
+ override val profile: ResProfile,
+ override val replyTo: ActorRef[ShuntConverterMessage]
+ ) extends WorkerMessage.Convert[RES, ResProfile, ShuntConverterMessage]
+
+ /**
+     * Converts a single renewable energy source system to a fixed feed-in model, as there is too little information
+     * to sophisticatedly guess typical asset types. Different voltage regulation strategies are not covered yet.
+ *
+ * @param input Input model
+ * @param node Node, the renewable energy source system is connected to
+ * @param uuid Option to a specific uuid
+ * @return A [[FixedFeedInInput]]
+ */
+ def convertModel(
+ input: RES,
+ node: NodeInput,
+ uuid: Option[UUID] = None
+ ): FixedFeedInInput = {
+ val p = Quantities.getQuantity(input.p, MEGAWATT)
+ val q = Quantities.getQuantity(input.q, MEGAVAR)
+ val cosphi = cosPhi(p.getValue.doubleValue(), q.getValue.doubleValue())
+ val varCharacteristicString =
+ "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
+ val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
+
+ new FixedFeedInInput(
+ uuid.getOrElse(UUID.randomUUID()),
+ input.id + "_" + input.resType.toString,
+ OperatorInput.NO_OPERATOR_ASSIGNED,
+ OperationTime.notLimited(),
+ node,
+ new CosPhiFixed(varCharacteristicString),
+ sRated,
+ cosphi
+ )
+ }
+ }
+}
diff --git a/src/main/scala/edu/ie3/simbench/actor/ShuntConverterMessageSupport.scala b/src/main/scala/edu/ie3/simbench/actor/ShuntConverterMessageSupport.scala
new file mode 100644
index 00000000..47da832b
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/ShuntConverterMessageSupport.scala
@@ -0,0 +1,38 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.ActorRef
+import edu.ie3.datamodel.models.input.NodeInput
+import edu.ie3.datamodel.models.input.system.SystemParticipantInput
+import edu.ie3.simbench.model.datamodel.{Node, ShuntModel}
+import edu.ie3.simbench.model.datamodel.profiles.ProfileModel
+
+import java.util.UUID
+
+trait ShuntConverterMessageSupport[I <: ShuntModel, P <: ProfileModel[_, _], R <: SystemParticipantInput] {
+ sealed trait ShuntConverterMessage
+
+ protected[actor] abstract class Init extends ShuntConverterMessage {
+ val simBenchCode: String
+ val amountOfWorkers: Int
+ val profiles: Vector[P]
+ val mutator: ActorRef[Mutator.MutatorMessage]
+ val replyTo: ActorRef[Converter.ConverterMessage]
+ }
+
+ protected[actor] abstract class Converted extends ShuntConverterMessage {
+ val id: String
+ val node: Node.NodeKey
+ val model: R
+ val timeSeriesUuid: UUID
+ }
+
+ /**
+ * Request to convert all given models
+ */
+ protected[actor] abstract class Convert extends ShuntConverterMessage {
+ val simBenchCode: String
+ val inputs: Vector[I]
+ val nodes: Map[Node, NodeInput]
+ val replyTo: ActorRef[Converter.ConverterMessage]
+ }
+}
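
One design note on this trait: `ShuntConverterMessage` and the abstract `Init`/`Convert`/`Converted` are nested members, so every converter object that mixes in `ShuntConverterMessageSupport` gets its own path-dependent message protocol, and a message meant for one converter cannot be sent to another by accident. A small illustration follows; the helper object is hypothetical and not part of the patch:

```scala
import akka.actor.typed.ActorRef
import edu.ie3.datamodel.models.input.NodeInput
import edu.ie3.simbench.actor.{Converter, LoadConverter}
import edu.ie3.simbench.model.datamodel.Node

object ProtocolIsolationSketch {
  /* Accepted by the compiler: LoadConverter.Convert is part of
   * LoadConverter's own ShuntConverterMessage protocol. */
  def requestLoadConversion(
      loadConverter: ActorRef[LoadConverter.ShuntConverterMessage],
      nodes: Map[Node, NodeInput],
      replyTo: ActorRef[Converter.ConverterMessage]
  ): Unit =
    loadConverter ! LoadConverter.Convert(
      "1-LV-rural1--0-no_sw",
      Vector.empty,
      nodes,
      replyTo
    )

  /* Rejected by the compiler: ResConverter.Convert belongs to ResConverter's
   * protocol, even though both stem from the same trait:
   *   loadConverter ! ResConverter.Convert("1-LV-rural1--0-no_sw", Vector.empty, nodes, replyTo)
   */
}
```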
diff --git a/src/main/scala/edu/ie3/simbench/actor/WorkerMessage.scala b/src/main/scala/edu/ie3/simbench/actor/WorkerMessage.scala
new file mode 100644
index 00000000..a592ab6d
--- /dev/null
+++ b/src/main/scala/edu/ie3/simbench/actor/WorkerMessage.scala
@@ -0,0 +1,27 @@
+package edu.ie3.simbench.actor
+
+import akka.actor.typed.ActorRef
+import edu.ie3.datamodel.models.input.NodeInput
+import edu.ie3.simbench.model.datamodel.ShuntModel
+import edu.ie3.simbench.model.datamodel.profiles.ProfileModel
+
+import java.util.UUID
+
+sealed trait WorkerMessage
+object WorkerMessage {
+ final case class Init(mutator: ActorRef[Mutator.MutatorMessage])
+ extends WorkerMessage
+
+ abstract protected[actor] class Convert[M <: ShuntModel, P <: ProfileModel[
+ _,
+ _
+ ], R]
+ extends WorkerMessage {
+ val model: M
+ val node: NodeInput
+ val profile: P
+ val replyTo: ActorRef[R]
+ }
+
+ final case class TimeSeriesPersisted(uuid: UUID) extends WorkerMessage
+}
diff --git a/src/main/scala/edu/ie3/simbench/config/ConfigValidator.scala b/src/main/scala/edu/ie3/simbench/config/ConfigValidator.scala
index 9ed27336..705a9efb 100644
--- a/src/main/scala/edu/ie3/simbench/config/ConfigValidator.scala
+++ b/src/main/scala/edu/ie3/simbench/config/ConfigValidator.scala
@@ -22,6 +22,7 @@ case object ConfigValidator {
@throws[SimbenchException]
def checkValidity(config: SimbenchConfig): Unit = {
checkValidity(config.io)
+ checkValidity(config.conversion)
}
/**
@@ -36,6 +37,21 @@ case object ConfigValidator {
checkSimbenchCodes(io.simbenchCodes)
}
+ /**
+   * Checks the validity of a conversion config part. If any content is not valid, a
+ * [[SimbenchConfigException]] is thrown
+ *
+ * @param conversion The conversion config to validate
+ * @throws SimbenchException If any of the content is not as it is expected
+ */
+ @throws[SimbenchException]
+ private def checkValidity(conversion: SimbenchConfig.Conversion): Unit = {
+ if (conversion.participantWorkersPerType <= 0)
+ throw new SimbenchException(
+ "The amount of participant workers has to be positive!"
+ )
+ }
+
/**
* Checks the validity of the provided codes with the help of the permissible ones in [[edu.ie3.simbench.model.SimbenchCode]]
* @param codes The list of codes to check
diff --git a/src/main/scala/edu/ie3/simbench/config/SimbenchConfig.scala b/src/main/scala/edu/ie3/simbench/config/SimbenchConfig.scala
index 8f9caba7..e38fd541 100644
--- a/src/main/scala/edu/ie3/simbench/config/SimbenchConfig.scala
+++ b/src/main/scala/edu/ie3/simbench/config/SimbenchConfig.scala
@@ -1,4 +1,4 @@
-// generated by tscfg 0.9.986 on Mon Aug 09 20:12:24 CEST 2021
+// generated by tscfg 0.9.993 on Fri Aug 27 13:41:17 CEST 2021
// source: src/main/resources/config-template.conf
package edu.ie3.simbench.config
@@ -38,6 +38,7 @@ object SimbenchConfig {
}
final case class Conversion(
+ participantWorkersPerType: scala.Int,
removeSwitches: scala.Boolean
)
object Conversion {
@@ -47,6 +48,10 @@ object SimbenchConfig {
$tsCfgValidator: $TsCfgValidator
): SimbenchConfig.Conversion = {
SimbenchConfig.Conversion(
+ participantWorkersPerType =
+ if (c.hasPathOrNull("participantWorkersPerType"))
+ c.getInt("participantWorkersPerType")
+ else 20,
removeSwitches = c.hasPathOrNull("removeSwitches") && c.getBoolean(
"removeSwitches"
)
diff --git a/src/main/scala/edu/ie3/simbench/convert/LoadConverter.scala b/src/main/scala/edu/ie3/simbench/convert/LoadConverter.scala
deleted file mode 100644
index 8aa9f6cb..00000000
--- a/src/main/scala/edu/ie3/simbench/convert/LoadConverter.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-package edu.ie3.simbench.convert
-
-import java.util.{Locale, UUID}
-
-import edu.ie3.datamodel.models.OperationTime
-import edu.ie3.datamodel.models.StandardLoadProfile.DefaultLoadProfiles
-import edu.ie3.datamodel.models.input.system.LoadInput
-import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
-import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
-import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
-import edu.ie3.datamodel.models.value.SValue
-import edu.ie3.simbench.convert.profiles.PowerProfileConverter
-import edu.ie3.simbench.model.datamodel.profiles.{LoadProfile, LoadProfileType}
-import edu.ie3.simbench.model.datamodel.{Load, Node}
-import edu.ie3.util.quantities.PowerSystemUnits.{
- KILOWATTHOUR,
- MEGAVAR,
- MEGAVOLTAMPERE,
- MEGAWATT
-}
-import tech.units.indriya.quantity.Quantities
-
-import scala.collection.parallel.CollectionConverters._
-
-case object LoadConverter extends ShuntConverter {
- def convert(
- loads: Vector[Load],
- nodes: Map[Node, NodeInput],
- profiles: Map[LoadProfileType, LoadProfile]
- ): Map[LoadInput, IndividualTimeSeries[SValue]] =
- loads.par
- .map { load =>
- val node = NodeConverter.getNode(load.node, nodes)
- val profile = PowerProfileConverter.getProfile(load.profile, profiles)
- convert(load, node, profile)
- }
- .seq
- .toMap
-
- /**
- * Converts a single SimBench [[Load]] to ie3's [[LoadInput]]. Currently not sufficiently covered:
- * - Consumed energy throughout the year
- * - different VAr characteristics
- *
- * @param input Input model
- * @param node Node, the load is connected to
- * @param profile SimBench load profile
- * @param uuid UUID to use for the model generation (default: Random UUID)
- * @return A [[LoadInput]] model
- */
- def convert(
- input: Load,
- node: NodeInput,
- profile: LoadProfile,
- uuid: UUID = UUID.randomUUID()
- ): (LoadInput, IndividualTimeSeries[SValue]) = {
- val id = input.id
- val cosphi = cosPhi(input.pLoad, input.qLoad)
- val varCharacteristicString =
- "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
- val eCons = Quantities.getQuantity(0d, KILOWATTHOUR)
- val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
-
- val p = Quantities.getQuantity(input.pLoad, MEGAWATT)
- val q = Quantities.getQuantity(input.qLoad, MEGAVAR)
- val timeSeries = PowerProfileConverter.convert(profile, p, q)
-
- new LoadInput(
- uuid,
- id,
- OperatorInput.NO_OPERATOR_ASSIGNED,
- OperationTime.notLimited(),
- node,
- new CosPhiFixed(varCharacteristicString),
- DefaultLoadProfiles.NO_STANDARD_LOAD_PROFILE,
- false,
- eCons,
- sRated,
- cosphi
- ) ->
- timeSeries
- }
-}
diff --git a/src/main/scala/edu/ie3/simbench/convert/PowerPlantConverter.scala b/src/main/scala/edu/ie3/simbench/convert/PowerPlantConverter.scala
deleted file mode 100644
index 217050bd..00000000
--- a/src/main/scala/edu/ie3/simbench/convert/PowerPlantConverter.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-package edu.ie3.simbench.convert
-
-import java.util.{Locale, UUID}
-
-import edu.ie3.datamodel.models.OperationTime
-import edu.ie3.datamodel.models.input.system.FixedFeedInInput
-import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
-import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
-import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
-import edu.ie3.datamodel.models.value.PValue
-import edu.ie3.simbench.convert.profiles.PowerProfileConverter
-import edu.ie3.simbench.model.datamodel.profiles.{
- PowerPlantProfile,
- PowerPlantProfileType
-}
-import edu.ie3.simbench.model.datamodel.{Node, PowerPlant}
-import edu.ie3.util.quantities.PowerSystemUnits.{
- MEGAVAR,
- MEGAVOLTAMPERE,
- MEGAWATT
-}
-import tech.units.indriya.quantity.Quantities
-
-import scala.collection.parallel.CollectionConverters._
-
-case object PowerPlantConverter extends ShuntConverter {
-
- /**
- * Convert a full set of power plants
- *
- * @param powerPlants Input models to convert
- * @param nodes Mapping from Simbench to power system data model node
- * @param profiles Collection of [[PowerPlantProfile]]s
- * @return A mapping from converted power plant to equivalent individual time series
- */
- def convert(
- powerPlants: Vector[PowerPlant],
- nodes: Map[Node, NodeInput],
- profiles: Map[PowerPlantProfileType, PowerPlantProfile]
- ): Map[FixedFeedInInput, IndividualTimeSeries[PValue]] =
- powerPlants.par
- .map { powerPlant =>
- val node = NodeConverter.getNode(powerPlant.node, nodes)
- val profile =
- PowerProfileConverter.getProfile(powerPlant.profile, profiles)
- convert(powerPlant, node, profile)
- }
- .seq
- .toMap
-
- /**
- * Converts a single power plant model to a fixed feed in model, as the power system data model does not reflect
- * power plants, yet. Voltage regulation strategies are also not correctly accounted for.
- *
- * @param input Input model
- * @param node Node, the power plant is connected to
- * @param profile SimBench power plant profile
- * @param uuid Option to a specific uuid
- * @return A pair of [[FixedFeedInInput]] and matching active power time series
- */
- def convert(
- input: PowerPlant,
- node: NodeInput,
- profile: PowerPlantProfile,
- uuid: Option[UUID] = None
- ): (FixedFeedInInput, IndividualTimeSeries[PValue]) = {
- val p = Quantities.getQuantity(input.p, MEGAWATT)
- val q = input.q match {
- case Some(value) => Quantities.getQuantity(value, MEGAVAR)
- case None => Quantities.getQuantity(0d, MEGAVAR)
- }
- val cosphi = cosPhi(p.getValue.doubleValue(), q.getValue.doubleValue())
- val varCharacteristicString =
- "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
- val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
-
- /* Flip the sign, as infeed is negative in PowerSystemDataModel */
- val timeSeries = PowerProfileConverter.convert(profile, p.multiply(-1))
-
- new FixedFeedInInput(
- uuid.getOrElse(UUID.randomUUID()),
- input.id,
- OperatorInput.NO_OPERATOR_ASSIGNED,
- OperationTime.notLimited(),
- node,
- new CosPhiFixed(varCharacteristicString),
- sRated,
- cosphi
- ) -> timeSeries
- }
-}
diff --git a/src/main/scala/edu/ie3/simbench/convert/ResConverter.scala b/src/main/scala/edu/ie3/simbench/convert/ResConverter.scala
deleted file mode 100644
index 23937022..00000000
--- a/src/main/scala/edu/ie3/simbench/convert/ResConverter.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-package edu.ie3.simbench.convert
-
-import java.util.{Locale, UUID}
-
-import edu.ie3.datamodel.models.OperationTime
-import edu.ie3.datamodel.models.input.system.FixedFeedInInput
-import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
-import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
-import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
-import edu.ie3.datamodel.models.value.PValue
-import edu.ie3.simbench.convert.profiles.PowerProfileConverter
-import edu.ie3.simbench.model.datamodel.profiles.{ResProfile, ResProfileType}
-import edu.ie3.simbench.model.datamodel.{Node, RES}
-import edu.ie3.util.quantities.PowerSystemUnits.{
- MEGAVAR,
- MEGAVOLTAMPERE,
- MEGAWATT
-}
-import tech.units.indriya.quantity.Quantities
-
-import scala.collection.parallel.CollectionConverters._
-
-case object ResConverter extends ShuntConverter {
-
- /**
- * Convert a full set of renewable energy source system
- *
- * @param res Input models to convert
- * @param nodes Mapping from Simbench to power system data model node
- * @param profiles Collection of [[ResProfile]]s
- * @return A mapping from converted renewable energy source system to equivalent individual time series
- */
- def convert(
- res: Vector[RES],
- nodes: Map[Node, NodeInput],
- profiles: Map[ResProfileType, ResProfile]
- ): Map[FixedFeedInInput, IndividualTimeSeries[PValue]] =
- res.par
- .map { plant =>
- val node = NodeConverter.getNode(plant.node, nodes)
- val profile =
- PowerProfileConverter.getProfile(plant.profile, profiles)
- convert(plant, node, profile)
- }
- .seq
- .toMap
-
- /**
- * Converts a single renewable energy source system to a fixed feed in model due to lacking information to
- * sophistically guess typical types of assets. Different voltage regulation strategies are not covered, yet.
- *
- * @param input Input model
- * @param node Node, the renewable energy source system is connected to
- * @param profile SimBench renewable energy source system profile
- * @param uuid Option to a specific uuid
- * @return A pair of [[FixedFeedInInput]] and matching active power time series
- */
- def convert(
- input: RES,
- node: NodeInput,
- profile: ResProfile,
- uuid: Option[UUID] = None
- ): (FixedFeedInInput, IndividualTimeSeries[PValue]) = {
- val p = Quantities.getQuantity(input.p, MEGAWATT)
- val q = Quantities.getQuantity(input.q, MEGAVAR)
- val cosphi = cosPhi(p.getValue.doubleValue(), q.getValue.doubleValue())
- val varCharacteristicString =
- "cosPhiFixed:{(0.0,%#.2f)}".formatLocal(Locale.ENGLISH, cosphi)
- val sRated = Quantities.getQuantity(input.sR, MEGAVOLTAMPERE)
-
- /* Flip the sign, as infeed is negative in PowerSystemDataModel */
- val timeSeries = PowerProfileConverter.convert(profile, p.multiply(-1))
-
- new FixedFeedInInput(
- uuid.getOrElse(UUID.randomUUID()),
- input.id + "_" + input.resType.toString,
- OperatorInput.NO_OPERATOR_ASSIGNED,
- OperationTime.notLimited(),
- node,
- new CosPhiFixed(varCharacteristicString),
- sRated,
- cosphi
- ) -> timeSeries
- }
-}
diff --git a/src/main/scala/edu/ie3/simbench/main/RunSimbench.scala b/src/main/scala/edu/ie3/simbench/main/RunSimbench.scala
index 4482ad04..6b207682 100644
--- a/src/main/scala/edu/ie3/simbench/main/RunSimbench.scala
+++ b/src/main/scala/edu/ie3/simbench/main/RunSimbench.scala
@@ -1,26 +1,10 @@
package edu.ie3.simbench.main
-import java.nio.file.Paths
-import edu.ie3.datamodel.io.naming.{
- DefaultDirectoryHierarchy,
- EntityPersistenceNamingStrategy,
- FileNamingStrategy
-}
-import edu.ie3.datamodel.io.sink.CsvFileSink
-import edu.ie3.simbench.config.{ConfigValidator, SimbenchConfig}
-import edu.ie3.simbench.convert.GridConverter
-import edu.ie3.simbench.exception.CodeValidationException
-import edu.ie3.simbench.io.{Downloader, IoUtils, SimbenchReader, Zipper}
-import edu.ie3.simbench.model.SimbenchCode
-import edu.ie3.util.io.FileIOUtils
-import org.apache.commons.io.FilenameUtils
+import akka.actor.typed.ActorSystem
-import scala.concurrent.Await
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration.Duration
-import scala.jdk.CollectionConverters._
-import scala.jdk.FutureConverters.CompletionStageOps
-import scala.util.{Failure, Success}
+import edu.ie3.simbench.actor.Coordinator
+import edu.ie3.simbench.actor.Coordinator.{CoordinatorMessage, Start}
+import edu.ie3.simbench.config.{ConfigValidator, SimbenchConfig}
/**
* This is not meant to be final production code. It is more a place for "testing" the full method stack.
@@ -36,103 +20,11 @@ object RunSimbench extends SimbenchHelper {
/* Validate the config */
ConfigValidator.checkValidity(simbenchConfig)
- simbenchConfig.io.simbenchCodes.foreach { simbenchCode =>
- logger.info(s"$simbenchCode - Downloading data set from SimBench website")
- val downloader =
- Downloader(
- simbenchConfig.io.input.download.folder,
- simbenchConfig.io.input.download.baseUrl,
- simbenchConfig.io.input.download.failOnExistingFiles
- )
- val downloadedFile =
- downloader.download(
- SimbenchCode(simbenchCode).getOrElse(
- throw CodeValidationException(
- s"'$simbenchCode' is no valid SimBench code."
- )
- )
- )
- val dataFolder =
- Zipper.unzip(
- downloadedFile,
- downloader.downloadFolder,
- simbenchConfig.io.input.download.failOnExistingFiles,
- flattenDirectories = true
- )
-
- logger.info(s"$simbenchCode - Reading in the SimBench data set")
- val simbenchReader = SimbenchReader(
- simbenchCode,
- dataFolder,
- simbenchConfig.io.input.csv.separator,
- simbenchConfig.io.input.csv.fileEnding,
- simbenchConfig.io.input.csv.fileEncoding
- )
- val simbenchModel = simbenchReader.readGrid()
-
- logger.info(s"$simbenchCode - Converting to PowerSystemDataModel")
- val (
- jointGridContainer,
- timeSeries,
- timeSeriesMapping,
- powerFlowResults
- ) =
- GridConverter.convert(
- simbenchCode,
- simbenchModel,
- simbenchConfig.conversion.removeSwitches
- )
-
- logger.info(s"$simbenchCode - Writing converted data set to files")
- /* Check, if a directory hierarchy is needed or not */
- val baseTargetDirectory =
- IoUtils.ensureHarmonizedAndTerminatingFileSeparator(
- simbenchConfig.io.output.targetFolder
- )
- val csvSink = if (simbenchConfig.io.output.csv.directoryHierarchy) {
- new CsvFileSink(
- baseTargetDirectory,
- new FileNamingStrategy(
- new EntityPersistenceNamingStrategy(),
- new DefaultDirectoryHierarchy(baseTargetDirectory, simbenchCode)
- ),
- false,
- simbenchConfig.io.output.csv.separator
- )
- } else {
- new CsvFileSink(
- baseTargetDirectory + simbenchCode,
- new FileNamingStrategy(),
- false,
- simbenchConfig.io.output.csv.separator
- )
- }
-
- csvSink.persistJointGrid(jointGridContainer)
- timeSeries.foreach(csvSink.persistTimeSeries(_))
- csvSink.persistAllIgnoreNested(timeSeriesMapping.asJava)
- csvSink.persistAll(powerFlowResults.asJava)
-
- if (simbenchConfig.io.output.compress) {
- logger.info(s"$simbenchCode - Adding files to compressed archive")
- val rawOutputPath = Paths.get(baseTargetDirectory + simbenchCode)
- val archivePath = Paths.get(
- FilenameUtils.concat(baseTargetDirectory, simbenchCode + ".tar.gz")
- )
- val compressFuture =
- FileIOUtils.compressDir(rawOutputPath, archivePath).asScala
- compressFuture.onComplete {
- case Success(_) =>
- FileIOUtils.deleteRecursively(rawOutputPath)
- case Failure(exception) =>
- logger.error(
- s"Compression of output files to '$archivePath' has failed. Keep raw data.",
- exception
- )
- }
-
- Await.ready(compressFuture, Duration("180s"))
- }
- }
+ /* Start the actor system */
+ val actorSystem = ActorSystem[CoordinatorMessage](
+ Coordinator(),
+ "Coordinator"
+ )
+ actorSystem ! Start(simbenchConfig)
}
}
diff --git a/src/test/scala/edu/ie3/simbench/convert/ConverterSpec.scala b/src/test/scala/edu/ie3/simbench/convert/ConverterSpec.scala
new file mode 100644
index 00000000..721b71b6
--- /dev/null
+++ b/src/test/scala/edu/ie3/simbench/convert/ConverterSpec.scala
@@ -0,0 +1,107 @@
+package edu.ie3.simbench.convert
+
+import akka.actor.testkit.typed.scaladsl.{ActorTestKit, TestProbe}
+import akka.actor.typed.ActorRef
+import edu.ie3.simbench.actor.{Converter, Coordinator, Mutator, WorkerMessage}
+import edu.ie3.simbench.actor.Mutator.{
+ MutatorMessage,
+ PersistGridStructure,
+ PersistNodalResults,
+ PersistTimeSeriesMapping
+}
+import edu.ie3.test.common.UnitSpec
+import org.scalatest.BeforeAndAfterAll
+
+import scala.concurrent.duration.FiniteDuration
+
+class ConverterSpec extends UnitSpec with BeforeAndAfterAll {
+ val akkaTestKit: ActorTestKit = ActorTestKit()
+
+ override protected def afterAll(): Unit = {
+ akkaTestKit.shutdownTestKit()
+ super.afterAll()
+ }
+
+ val coordinator: TestProbe[Coordinator.CoordinatorMessage] =
+ akkaTestKit.createTestProbe[Coordinator.CoordinatorMessage]("coordinator")
+ val mutator: TestProbe[MutatorMessage] =
+ akkaTestKit.createTestProbe[Mutator.MutatorMessage]("mutator")
+ val converter: ActorRef[Converter.ConverterMessage] = akkaTestKit.spawn(
+ Converter.idle(
+ Converter.StateData(
+ "1-LV-rural1--0-no_sw",
+ "src/test/resources/gridData/1-LV-rural1--0-no_sw",
+ "http://141.51.193.167/simbench/gui/usecase/download",
+ failOnExistingFiles = false,
+ ".csv",
+ "UTF-8",
+ ";",
+ removeSwitches = false,
+ 20,
+ mutator.ref,
+ coordinator.ref
+ )
+ ),
+ "converter"
+ )
+
+ "Converting a full grid model" should {
+ converter ! Converter.Convert("1-LV-rural1--0-no_sw")
+ "provide correct amount of converted models" in {
+
+      /* Receive 17 time series persistence requests and acknowledge each of them */
+ (1 to 17).foldLeft(List.empty[Mutator.PersistTimeSeries]) {
+ case (messages, idx) =>
+ logger.debug("Received {} time series for persistence so far.", idx)
+ val message = mutator.expectMessageType[Mutator.PersistTimeSeries](
+ FiniteDuration(60, "s")
+ )
+ message.replyTo ! WorkerMessage.TimeSeriesPersisted(
+ message.timeSeries.getUuid
+ )
+ messages :+ message
+ }
+
+      /* Receive all other awaited messages */
+ mutator.expectMessageType[Mutator.PersistGridStructure](
+ FiniteDuration(60, "s")
+ ) match {
+ case Mutator.PersistGridStructure(
+ simBenchCode,
+ nodes,
+ lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ measurements,
+ loads,
+ fixedFeedIns,
+ _
+ ) =>
+ simBenchCode shouldBe "1-LV-rural1--0-no_sw"
+ nodes.size shouldBe 15
+ lines.size shouldBe 13
+ transformers2w.size shouldBe 1
+ transformers3w.size shouldBe 0
+ switches.size shouldBe 0
+ measurements.size shouldBe 0
+ loads.size shouldBe 13
+ fixedFeedIns.size shouldBe 4
+ }
+
+ mutator.expectMessageType[Mutator.PersistTimeSeriesMapping](
+ FiniteDuration(60, "s")
+ ) match {
+ case Mutator.PersistTimeSeriesMapping(mapping, _) =>
+ mapping.size shouldBe 17
+ }
+
+ mutator.expectMessageType[Mutator.PersistNodalResults](
+ FiniteDuration(60, "s")
+ ) match {
+ case Mutator.PersistNodalResults(results, _) =>
+ results.size shouldBe 15
+ }
+ }
+ }
+}
diff --git a/src/test/scala/edu/ie3/simbench/convert/GridConverterSpec.scala b/src/test/scala/edu/ie3/simbench/convert/GridConverterSpec.scala
index b4714e1a..0cc435fc 100644
--- a/src/test/scala/edu/ie3/simbench/convert/GridConverterSpec.scala
+++ b/src/test/scala/edu/ie3/simbench/convert/GridConverterSpec.scala
@@ -1,20 +1,32 @@
package edu.ie3.simbench.convert
+import akka.actor.testkit.typed.scaladsl.ActorTestKit
+
import java.nio.file.Paths
import java.util
import edu.ie3.datamodel.models.UniqueEntity
-import edu.ie3.datamodel.models.input.NodeInput
-import edu.ie3.datamodel.models.input.connector.{LineInput, Transformer2WInput}
-import edu.ie3.datamodel.models.input.system.{FixedFeedInInput, LoadInput}
+import edu.ie3.simbench.actor.GridConverter.ConvertGridStructure
+import edu.ie3.simbench.actor.{Converter, GridConverter}
import edu.ie3.simbench.convert.NodeConverter.AttributeOverride.JoinOverride
import edu.ie3.simbench.io.SimbenchReader
import edu.ie3.simbench.model.datamodel.{GridModel, Node, Switch}
import edu.ie3.test.common.{SwitchTestingData, UnitSpec}
-import org.scalatest.Inside._
+import org.scalatest.BeforeAndAfterAll
+import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.jdk.CollectionConverters._
-class GridConverterSpec extends UnitSpec with SwitchTestingData {
+class GridConverterSpec
+ extends UnitSpec
+ with BeforeAndAfterAll
+ with SwitchTestingData {
+ val akkaTestKit: ActorTestKit = ActorTestKit()
+
+ override protected def afterAll(): Unit = {
+ akkaTestKit.shutdownTestKit()
+ super.afterAll()
+ }
+
val simbenchReader: SimbenchReader = SimbenchReader(
"1-LV-rural1--0-no_sw",
Paths.get("src/test/resources/gridData/1-LV-rural1--0-no_sw")
@@ -139,67 +151,49 @@ class GridConverterSpec extends UnitSpec with SwitchTestingData {
"converting a full data set" should {
"bring the correct amount of converted models" in {
- val actual = GridConverter.convert(
+ /* Set up the actors */
+ val gridConverter = akkaTestKit.spawn(GridConverter(), "gridConverter")
+ val converter =
+ akkaTestKit.createTestProbe[Converter.ConverterMessage]("converter")
+
+ /* Issue the conversion */
+ gridConverter ! ConvertGridStructure(
"1-LV-rural1--0-no_sw",
- input,
- removeSwitches = false
+ input.nodes,
+ input.nodePFResults,
+ input.externalNets,
+ input.powerPlants,
+ input.res,
+ input.transformers2w,
+ input.transformers3w,
+ input.lines,
+ input.switches,
+ input.measurements,
+ removeSwitches = false,
+ converter.ref
)
- inside(actual) {
- case (
- gridContainer,
- timeSeries,
- timeSeriesMapping,
- powerFlowResults
+
+ converter.expectMessageType[Converter.GridStructureConverted](
+ FiniteDuration(60, "s")
+ ) match {
+ case Converter.GridStructureConverted(
+ nodeConversion,
+ nodeResults,
+ lines,
+ transformers2w,
+ transformers3w,
+ switches,
+ measurements
) =>
- /* Evaluate the correctness of the container by counting the occurrence of models (the correct conversion is
- * tested in separate unit tests */
- gridContainer.getGridName shouldBe "1-LV-rural1--0-no_sw"
- countClassOccurrences(gridContainer.getRawGrid.allEntitiesAsList()) shouldBe Map(
- classOf[NodeInput] -> 15,
- classOf[LineInput] -> 13,
- classOf[Transformer2WInput] -> 1
- )
- countClassOccurrences(
- gridContainer.getSystemParticipants.allEntitiesAsList()
- ) shouldBe Map(
- classOf[FixedFeedInInput] -> 4,
- classOf[LoadInput] -> 13
- )
- countClassOccurrences(gridContainer.getGraphics.allEntitiesAsList()) shouldBe Map
- .empty[Class[_ <: UniqueEntity], Int]
-
- /* Evaluate the correctness of the time series by counting the occurrence of models */
- timeSeries.size shouldBe 17
-
- /* Evaluate the existence of time series mappings for all participants */
- timeSeriesMapping.size shouldBe 17
- val participantUuids = gridContainer.getSystemParticipants
- .allEntitiesAsList()
- .asScala
- .map(_.getUuid)
- .toVector
- /* There is no participant uuid in mapping, that is not among participants */
- timeSeriesMapping.exists(
- entry => !participantUuids.contains(entry.getParticipant)
- ) shouldBe false
-
- /* Evaluate the amount of converted power flow results */
- powerFlowResults.size shouldBe 15
+ nodeConversion.size shouldBe 15
+ nodeResults.size shouldBe 15
+ lines.size shouldBe 13
+ transformers2w.size shouldBe 1
+ transformers3w.size shouldBe 0
+ switches.size shouldBe 0
+ measurements.size shouldBe 0
}
}
}
}
-
- def countClassOccurrences(
- entities: util.List[_ <: UniqueEntity]
- ): Map[Class[_ <: UniqueEntity], Int] =
- entities.asScala
- .groupBy(_.getClass)
- .map(
- classToOccurrences =>
- classToOccurrences._1 -> classToOccurrences._2.size
- )
- .toSeq
- .sortBy(_._1.getName)
- .toMap
}
diff --git a/src/test/scala/edu/ie3/simbench/convert/LoadConverterSpec.scala b/src/test/scala/edu/ie3/simbench/convert/LoadConverterSpec.scala
index 1eb29e28..884cde2b 100644
--- a/src/test/scala/edu/ie3/simbench/convert/LoadConverterSpec.scala
+++ b/src/test/scala/edu/ie3/simbench/convert/LoadConverterSpec.scala
@@ -2,6 +2,7 @@ package edu.ie3.simbench.convert
import edu.ie3.datamodel.models.StandardUnits
import edu.ie3.datamodel.models.input.NodeInput
+import edu.ie3.simbench.actor.LoadConverter
import edu.ie3.simbench.model.datamodel.profiles.LoadProfile
import edu.ie3.test.common.{ConverterTestData, UnitSpec}
import edu.ie3.util.quantities.PowerSystemUnits.KILOWATTHOUR
@@ -19,7 +20,7 @@ class LoadConverterSpec extends UnitSpec with ConverterTestData {
"The load converter" should {
"convert a single model correctly" in {
/* Time series conversion is tested in a single test */
- val (actual, _) = LoadConverter.convert(input, node, inputProfile)
+ val actual = LoadConverter.Worker.convertModel(input, node)
actual.getId shouldBe actual.getId
actual.getNode shouldBe expected.getNode
diff --git a/src/test/scala/edu/ie3/simbench/convert/PowerPlantConverterSpec.scala b/src/test/scala/edu/ie3/simbench/convert/PowerPlantConverterSpec.scala
index d591c1d4..3ba79710 100644
--- a/src/test/scala/edu/ie3/simbench/convert/PowerPlantConverterSpec.scala
+++ b/src/test/scala/edu/ie3/simbench/convert/PowerPlantConverterSpec.scala
@@ -1,15 +1,12 @@
package edu.ie3.simbench.convert
-import edu.ie3.datamodel.models.StandardUnits
+import edu.ie3.simbench.actor.PowerPlantConverter
import edu.ie3.simbench.model.datamodel.profiles.{
PowerPlantProfile,
PowerPlantProfileType
}
import edu.ie3.test.common.{ConverterTestData, UnitSpec}
import edu.ie3.test.matchers.QuantityMatchers
-import tech.units.indriya.quantity.Quantities
-
-import scala.jdk.OptionConverters.RichOptional
class PowerPlantConverterSpec
extends UnitSpec
@@ -19,26 +16,8 @@ class PowerPlantConverterSpec
"converting a power plant without reactive power information" should {
val (input, expected) = getPowerPlantPair("EHV Gen 1")
val node = getNodePair("EHV Bus 177")._2
- val pProfile: PowerPlantProfile = PowerPlantProfile(
- "test profile",
- PowerPlantProfileType.PowerPlantProfile1,
- Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> BigDecimal(
- "0.75"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> BigDecimal(
- "0.55"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> BigDecimal(
- "0.35"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> BigDecimal(
- "0.15"
- )
- )
- )
- val (actual, actualTimeSeries) =
- PowerPlantConverter.convert(input, node, pProfile)
+ val actual =
+ PowerPlantConverter.Worker.convertModel(input, node)
"bring up the correct input model" in {
actual.getId shouldBe expected.getId
@@ -48,60 +27,13 @@ class PowerPlantConverterSpec
actual.getsRated shouldBe expected.getsRated
actual.getCosPhiRated shouldBe expected.getCosPhiRated
}
-
- "lead to the correct time series" in {
- val expected = Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> Quantities
- .getQuantity(-222750.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> Quantities
- .getQuantity(-163350.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> Quantities
- .getQuantity(-103950.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> Quantities
- .getQuantity(-44550.0, StandardUnits.ACTIVE_POWER_IN)
- )
-
- actualTimeSeries.getEntries.forEach { timeBasedValue =>
- val time = timeBasedValue.getTime
- val value = timeBasedValue.getValue
-
- expected.get(time) match {
- case Some(expectedValue) =>
- value.getP.toScala match {
- case Some(p) => p should equalWithTolerance(expectedValue)
- case None =>
- fail(s"Unable to get expected active power for time '$time'")
- }
- case None =>
- fail(s"Unable to get expected time series entry for time '$time'")
- }
- }
- }
}
"converting a power plant with reactive power information" should {
val (input, expected) = getPowerPlantPair("EHV Gen 1_withQ")
val node = getNodePair("EHV Bus 177")._2
- val pProfile: PowerPlantProfile = PowerPlantProfile(
- "test profile",
- PowerPlantProfileType.PowerPlantProfile1,
- Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> BigDecimal(
- "0.75"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> BigDecimal(
- "0.55"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> BigDecimal(
- "0.35"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> BigDecimal(
- "0.15"
- )
- )
- )
- val (actual, actualTimeSeries) =
- PowerPlantConverter.convert(input, node, pProfile)
+ val actual =
+ PowerPlantConverter.Worker.convertModel(input, node)
"bring up the correct input model" in {
actual.getId shouldBe expected.getId
@@ -111,35 +43,6 @@ class PowerPlantConverterSpec
actual.getsRated shouldBe expected.getsRated
actual.getCosPhiRated shouldBe expected.getCosPhiRated
}
-
- "lead to the correct time series" in {
- val expected = Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> Quantities
- .getQuantity(-222750.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> Quantities
- .getQuantity(-163350.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> Quantities
- .getQuantity(-103950.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> Quantities
- .getQuantity(-44550.0, StandardUnits.ACTIVE_POWER_IN)
- )
-
- actualTimeSeries.getEntries.forEach { timeBasedValue =>
- val time = timeBasedValue.getTime
- val value = timeBasedValue.getValue
-
- expected.get(time) match {
- case Some(expectedP) =>
- value.getP.toScala match {
- case Some(p) => p should equalWithTolerance(expectedP)
- case None =>
- fail(s"Unable to get expected active power for time '$time'")
- }
- case None =>
- fail(s"Unable to get expected time series entry for time '$time'")
- }
- }
- }
}
}
}
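Not part of the diff above, but for orientation: the trimmed spec now only exercises the pure model conversion, since the profile-to-time-series handling moved into the actor-based workers. A minimal sketch of that check, assuming ConverterTestData supplies the getPowerPlantPair/getNodePair fixtures used throughout the spec:

package edu.ie3.simbench.convert

import edu.ie3.simbench.actor.PowerPlantConverter
import edu.ie3.test.common.{ConverterTestData, UnitSpec}

// Sketch only: mirrors the reduced PowerPlantConverterSpec above and checks the
// static model produced by the worker's convertModel, without any profile input.
class PowerPlantWorkerSketchSpec extends UnitSpec with ConverterTestData {
  "the power plant worker's model conversion" should {
    "carry over id and rated values" in {
      val (input, expected) = getPowerPlantPair("EHV Gen 1")
      val node = getNodePair("EHV Bus 177")._2
      val actual = PowerPlantConverter.Worker.convertModel(input, node)
      actual.getId shouldBe expected.getId
      actual.getsRated shouldBe expected.getsRated
      actual.getCosPhiRated shouldBe expected.getCosPhiRated
    }
  }
}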
diff --git a/src/test/scala/edu/ie3/simbench/convert/ResConverterSpec.scala b/src/test/scala/edu/ie3/simbench/convert/ResConverterSpec.scala
index 52c690d1..b5cd89af 100644
--- a/src/test/scala/edu/ie3/simbench/convert/ResConverterSpec.scala
+++ b/src/test/scala/edu/ie3/simbench/convert/ResConverterSpec.scala
@@ -1,14 +1,10 @@
package edu.ie3.simbench.convert
-import edu.ie3.datamodel.models.StandardUnits
+import edu.ie3.simbench.actor.ResConverter
import java.util.Objects
-import edu.ie3.simbench.model.datamodel.profiles.{ResProfile, ResProfileType}
import edu.ie3.test.common.{ConverterTestData, UnitSpec}
import edu.ie3.test.matchers.QuantityMatchers
-import tech.units.indriya.quantity.Quantities
-
-import scala.jdk.OptionConverters.RichOptional
class ResConverterSpec
extends UnitSpec
@@ -19,25 +15,7 @@ class ResConverterSpec
"The RES converter" should {
val (_, node) = getNodePair("MV1.101 Bus 4")
val (input, expected) = getResPair("MV1.101 SGen 2")
- val pProfile: ResProfile = ResProfile(
- "test profile",
- ResProfileType.LvRural1,
- Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> BigDecimal(
- "0.75"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> BigDecimal(
- "0.55"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> BigDecimal(
- "0.35"
- ),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> BigDecimal(
- "0.15"
- )
- )
- )
- val (actual, actualTimeSeries) = ResConverter.convert(input, node, pProfile)
+ val actual = ResConverter.Worker.convertModel(input, node)
"bring up the correct input model" in {
Objects.nonNull(actual.getUuid) shouldBe true
@@ -49,34 +27,5 @@ class ResConverterSpec
actual.getsRated shouldBe expected.getsRated
actual.getCosPhiRated shouldBe expected.getCosPhiRated
}
-
- "lead to the correct time series" in {
- val expected = Map(
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:00") -> Quantities
- .getQuantity(-120.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:15") -> Quantities
- .getQuantity(-88.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:30") -> Quantities
- .getQuantity(-56.0, StandardUnits.ACTIVE_POWER_IN),
- simbenchTimeUtil.toZonedDateTime("01.01.1990 00:45") -> Quantities
- .getQuantity(-24.0, StandardUnits.ACTIVE_POWER_IN)
- )
-
- actualTimeSeries.getEntries.forEach { timeBasedValue =>
- val time = timeBasedValue.getTime
- val value = timeBasedValue.getValue
-
- expected.get(time) match {
- case Some(expectedValue) =>
- value.getP.toScala match {
- case Some(p) => p should equalWithTolerance(expectedValue)
- case None =>
- fail(s"Unable to get expected active power for time '$time'")
- }
- case None =>
- fail(s"Unable to get expected time series entry for time '$time'")
- }
- }
- }
}
}
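Analogous sketch (again not part of the diff), assuming the same test fixtures: the RES worker's convertModel is checked without a ResProfile, because the time series are now produced inside the actor pipeline rather than returned by the converter.

package edu.ie3.simbench.convert

import edu.ie3.simbench.actor.ResConverter
import edu.ie3.test.common.{ConverterTestData, UnitSpec}
import java.util.Objects

// Sketch only: static model conversion for a single SimBench SGen, no profile.
class ResWorkerSketchSpec extends UnitSpec with ConverterTestData {
  "the RES worker's model conversion" should {
    "assign a uuid and carry over rated values" in {
      val (_, node) = getNodePair("MV1.101 Bus 4")
      val (input, expected) = getResPair("MV1.101 SGen 2")
      val actual = ResConverter.Worker.convertModel(input, node)
      Objects.nonNull(actual.getUuid) shouldBe true
      actual.getsRated shouldBe expected.getsRated
      actual.getCosPhiRated shouldBe expected.getCosPhiRated
    }
  }
}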
diff --git a/src/test/scala/edu/ie3/test/common/ConfigTestData.scala b/src/test/scala/edu/ie3/test/common/ConfigTestData.scala
index ae9f0162..73100596 100644
--- a/src/test/scala/edu/ie3/test/common/ConfigTestData.scala
+++ b/src/test/scala/edu/ie3/test/common/ConfigTestData.scala
@@ -32,7 +32,8 @@ trait ConfigTestData {
List("1-LV-urban6--0-sw", "blabla", "1-EHVHV-mixed-2-0-sw")
)
- val validConversionConfig: Conversion = Conversion(removeSwitches = false)
+ val validConversionConfig: Conversion =
+ Conversion(removeSwitches = false, participantWorkersPerType = 20)
val validConfig = new SimbenchConfig(validConversionConfig, validIo)
}
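Illustrative addition, not part of the diff: the new participantWorkersPerType field sits on the same generated Conversion class, so further fixtures can be built next to validConfig; the parameter name suggests it bounds how many conversion workers are spawned per participant type.

// Hypothetical extra fixture, reusing validIo from the trait above.
val singleWorkerConversionConfig: Conversion =
  Conversion(removeSwitches = true, participantWorkersPerType = 1)
val singleWorkerConfig = new SimbenchConfig(singleWorkerConversionConfig, validIo)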