From bacda3f9106153fda3f0ea11771b7a7237fd7c57 Mon Sep 17 00:00:00 2001 From: Sebastian Peter <14994800+sebastian-peter@users.noreply.github.com> Date: Mon, 13 Dec 2021 16:08:31 +0100 Subject: [PATCH 01/58] Adding Dockerfile with variable input --- Dockerfile | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000..93b3e6aafc --- /dev/null +++ b/Dockerfile @@ -0,0 +1,23 @@ +FROM openjdk:8-jre-slim + +# USAGE: +# build with ARG version and if applicable with ARG snapshot suffix + # e.g.: docker build --build-arg version=1.0 --build-arg snapshotSuffix=-SNAPSHOT -t simona . +# run by mounting directory + # e.g. docker run -v `realpath inputData`:/inputData --rm simona + +ARG version +# snapshot suffix for jar files is "-SNAPSHOT" +ARG snapshotSuffix="" + +ENV jarFile="simona-${version}${snapshotSuffix}-all.jar" + +RUN mkdir exec +RUN mkdir inputData + +# copy simona fat jar into container +COPY build/libs/$jarFile exec/ +# inputData is mounted upon running +VOLUME /inputData + +ENTRYPOINT ["sh", "-c", "java -cp exec/${jarFile} edu.ie3.simona.main.RunSimonaStandalone --config=inputData/vn_simona/vn_simona.conf"] \ No newline at end of file From 2510bc12d468c98881abac2886bb9c38854c6278 Mon Sep 17 00:00:00 2001 From: Sebastian Peter <14994800+sebastian-peter@users.noreply.github.com> Date: Mon, 13 Dec 2021 16:13:37 +0100 Subject: [PATCH 02/58] Implementing Kafka result output --- build.gradle | 7 + .../resources/config/config-template.conf | 9 ++ .../ie3/simona/config/ConfigFailFast.scala | 20 ++- .../edu/ie3/simona/config/SimonaConfig.scala | 72 ++++++++- .../event/listener/ResultEventListener.scala | 34 +++- .../io/result/ResultEntityCsvSink.scala | 4 +- .../io/result/ResultEntityKafkaSink.scala | 131 +++++++++++++++ .../ie3/simona/io/result/ResultSinkType.scala | 18 +++ .../io/result/plain/NodeResultPlain.scala | 17 ++ .../simona/io/result/plain/ResultPlain.scala | 9 ++ .../util/scala/io/ScalaReflectionSerde.scala | 59 +++++++ .../simona/config/ConfigFailFastSpec.scala | 8 +- .../io/result/ResultEntityKafkaSpec.scala | 151 ++++++++++++++++++ .../simona/io/result/ResultSinkTypeSpec.scala | 14 +- .../edu/ie3/simona/test/KafkaFlatSpec.scala | 69 ++++++++ 15 files changed, 599 insertions(+), 23 deletions(-) create mode 100644 src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala create mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala create mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala create mode 100644 src/main/scala/edu/ie3/util/scala/io/ScalaReflectionSerde.scala create mode 100644 src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala create mode 100644 src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala diff --git a/build.gradle b/build.gradle index fb14e84664..be5966f55c 100644 --- a/build.gradle +++ b/build.gradle @@ -57,6 +57,7 @@ repositories { mavenCentral() // searches in Sonatype's central repository maven { url 'https://www.jitpack.io' } // allows github repos as dependencies maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } // sonatype snapshot repo + maven { url "https://packages.confluent.io/maven" } // confluent repo (kafka) } dependencies { @@ -122,6 +123,12 @@ dependencies { implementation "com.sksamuel.scapegoat:scalac-scapegoat-plugin_${scalaBinaryVersion}:1.4.10" scalaCompilerPlugin 
"com.sksamuel.scapegoat:scalac-scapegoat-plugin_${scalaBinaryVersion}:1.4.8" + /* Kafka */ + implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0' + implementation "io.confluent:kafka-streams-avro-serde:5.5.0" + implementation "com.sksamuel.avro4s:avro4s-core_2.13:4.0.11" + testImplementation 'org.testcontainers:kafka:1.16.2' // kafka testing + implementation 'org.apache.commons:commons-math3:3.6.1' // apache commons math3 implementation 'javax.measure:unit-api:2.1.3' implementation 'tech.units:indriya:2.1.2' // quantities diff --git a/src/main/resources/config/config-template.conf b/src/main/resources/config/config-template.conf index facc09d94f..5868991e48 100644 --- a/src/main/resources/config/config-template.conf +++ b/src/main/resources/config/config-template.conf @@ -198,6 +198,15 @@ simona.output.sink.influxDb1x { database = string } +#@optional +simona.output.sink.kafka { + topicNodeRes = string + runId = string + bootstrapServers = string + schemaRegistryUrl = string + linger = int // in ms +} + simona.output.grid = GridOutputConfig simona.output.participant = { defaultConfig = BaseOutputConfig diff --git a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala index ddc7c37704..ed1e04e798 100644 --- a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala +++ b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala @@ -8,7 +8,7 @@ package edu.ie3.simona.config import com.typesafe.config.{Config, ConfigException} import com.typesafe.scalalogging.LazyLogging -import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink.InfluxDb1x +import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink.{InfluxDb1x, Kafka} import edu.ie3.simona.config.SimonaConfig.{BaseOutputConfig, RefSystemConfig} import edu.ie3.simona.exceptions.InvalidConfigParameterException import edu.ie3.simona.io.result.ResultSinkType @@ -137,7 +137,7 @@ case object ConfigFailFast extends LazyLogging { */ private def checkDataSink(sink: SimonaConfig.Simona.Output.Sink): Unit = { // ensures failure if new output sinks are added to enforce adaptions of the check sink method as well - val supportedSinks = Set("influxdb1x", "csv") + val supportedSinks = Set("influxdb1x", "csv", "kafka") if ( !sink.productElementNames .map(_.trim.toLowerCase) @@ -146,7 +146,7 @@ case object ConfigFailFast extends LazyLogging { ) throw new InvalidConfigParameterException( s"Newly added sink(s) " + - s"'${sink.productElementNames.toSet.removedAll(supportedSinks)}' detected! " + + s"'${sink.productElementNames.map(_.toLowerCase).toSet.removedAll(supportedSinks)}' detected! " + s"Please adapt 'ConfigFailFast' accordingly! Currently supported sinks: ${supportedSinks.mkString(", ")}." ) @@ -171,15 +171,25 @@ case object ConfigFailFast extends LazyLogging { "one sink is configured!" 
) - // if this is db sink, check the connection sinkConfigs.find(_.isDefined) match { case Some(Some(influxDb1x: InfluxDb1x)) => + // if this is db sink, check the connection checkInfluxDb1xParams( "Sink", ResultSinkType.buildInfluxDb1xUrl(influxDb1x), influxDb1x.database ) - case _ => // no db connection, do nothing + case Some(Some(kafka: Kafka)) => + try { + UUID.fromString(kafka.runId) + } catch { + case e: IllegalArgumentException => + throw new InvalidConfigParameterException( + s"The UUID '${kafka.runId}' cannot be parsed as it is invalid.", + e + ) + } + case _ => // do nothing } } diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala index 22ab13f431..fd7699ebaf 100644 --- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala +++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala @@ -1539,7 +1539,8 @@ object SimonaConfig { final case class Sink( csv: scala.Option[SimonaConfig.Simona.Output.Sink.Csv], - influxDb1x: scala.Option[SimonaConfig.Simona.Output.Sink.InfluxDb1x] + influxDb1x: scala.Option[SimonaConfig.Simona.Output.Sink.InfluxDb1x], + kafka: scala.Option[SimonaConfig.Simona.Output.Sink.Kafka] ) object Sink { final case class Csv( @@ -1618,6 +1619,64 @@ object SimonaConfig { } + final case class Kafka( + bootstrapServers: java.lang.String, + linger: scala.Int, + runId: java.lang.String, + schemaRegistryUrl: java.lang.String, + topicNodeRes: java.lang.String + ) + object Kafka { + def apply( + c: com.typesafe.config.Config, + parentPath: java.lang.String, + $tsCfgValidator: $TsCfgValidator + ): SimonaConfig.Simona.Output.Sink.Kafka = { + SimonaConfig.Simona.Output.Sink.Kafka( + bootstrapServers = + $_reqStr(parentPath, c, "bootstrapServers", $tsCfgValidator), + linger = $_reqInt(parentPath, c, "linger", $tsCfgValidator), + runId = $_reqStr(parentPath, c, "runId", $tsCfgValidator), + schemaRegistryUrl = + $_reqStr(parentPath, c, "schemaRegistryUrl", $tsCfgValidator), + topicNodeRes = + $_reqStr(parentPath, c, "topicNodeRes", $tsCfgValidator) + ) + } + private def $_reqInt( + parentPath: java.lang.String, + c: com.typesafe.config.Config, + path: java.lang.String, + $tsCfgValidator: $TsCfgValidator + ): scala.Int = { + if (c == null) 0 + else + try c.getInt(path) + catch { + case e: com.typesafe.config.ConfigException => + $tsCfgValidator.addBadPath(parentPath + path, e) + 0 + } + } + + private def $_reqStr( + parentPath: java.lang.String, + c: com.typesafe.config.Config, + path: java.lang.String, + $tsCfgValidator: $TsCfgValidator + ): java.lang.String = { + if (c == null) null + else + try c.getString(path) + catch { + case e: com.typesafe.config.ConfigException => + $tsCfgValidator.addBadPath(parentPath + path, e) + null + } + } + + } + def apply( c: com.typesafe.config.Config, parentPath: java.lang.String, @@ -1644,6 +1703,17 @@ object SimonaConfig { $tsCfgValidator ) ) + else None, + kafka = + if (c.hasPathOrNull("kafka")) + scala.Some( + SimonaConfig.Simona.Output.Sink + .Kafka( + c.getConfig("kafka"), + parentPath + "kafka.", + $tsCfgValidator + ) + ) else None ) } diff --git a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala index 7c7b14013d..d8ed0e6e7a 100644 --- a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala +++ b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala @@ -9,7 +9,7 @@ package edu.ie3.simona.event.listener import akka.actor.{ActorRef, 
FSM, PoisonPill, Props, Stash} import akka.stream.Materializer import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor -import edu.ie3.datamodel.models.result.ResultEntity +import edu.ie3.datamodel.models.result.{NodeResult, ResultEntity} import edu.ie3.simona.agent.grid.GridResultsSupport.PartialTransformer3wResult import edu.ie3.simona.agent.state.AgentState import edu.ie3.simona.agent.state.AgentState.{Idle, Uninitialized} @@ -31,12 +31,7 @@ import edu.ie3.simona.exceptions.{ InitializationException, ProcessResultEventException } -import edu.ie3.simona.io.result.{ - ResultEntityCsvSink, - ResultEntityInfluxDbSink, - ResultEntitySink, - ResultSinkType -} +import edu.ie3.simona.io.result._ import edu.ie3.simona.logging.SimonaFSMActorLogging import edu.ie3.simona.sim.SimonaSim.ServiceInitComplete import edu.ie3.simona.util.ResultFileHierarchy @@ -137,6 +132,31 @@ object ResultEventListener extends Transformer3wResultSupport { (resultClass, _) ) ) + + case ResultSinkType.Kafka( + topicNodeRes, + runId, + bootstrapServers, + schemaRegistryUrl, + linger + ) => + val clzs: Iterable[Class[_ <: ResultEntity]] = Set( + classOf[NodeResult] + ) // TODO add classOf[LineResult] + clzs.map(clz => + Future.successful( + ( + clz, + ResultEntityKafkaSink[NodeResult]( + topicNodeRes, + runId, + bootstrapServers, + schemaRegistryUrl, + linger + ) + ) + ) + ) } } } diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityCsvSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityCsvSink.scala index 187c33c650..1d92f67bd0 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityCsvSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityCsvSink.scala @@ -187,7 +187,7 @@ final case class ResultEntityCsvSink private ( object ResultEntityCsvSink { - /** Default constructor to get an instance of [[ResultEntityCsvSource]] incl. + /** Default constructor to get an instance of [[ResultEntityCsvSink]] incl. * creation of the output file with the headers written * * @param outfileName @@ -202,7 +202,7 @@ object ResultEntityCsvSink { * @param materializer * the materializer to be used by the stream that writes the output file * @return - * instance of [[ResultEntityCsvSource]] to be used to write results + * instance of [[ResultEntityCsvSink]] to be used to write results */ def apply( outfileName: String, diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala new file mode 100644 index 0000000000..09f5b8cece --- /dev/null +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -0,0 +1,131 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.io.result + +import com.sksamuel.avro4s.RecordFormat +import edu.ie3.datamodel.models.result.{NodeResult, ResultEntity} +import edu.ie3.simona.io.result.ResultEntityKafkaSink.PlainWriter +import edu.ie3.simona.io.result.plain.{NodeResultPlain, ResultPlain} +import edu.ie3.util.quantities.PowerSystemUnits +import edu.ie3.util.scala.io.ScalaReflectionSerde.reflectionSerializer4S +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG +import org.apache.kafka.clients.producer.{ + KafkaProducer, + ProducerConfig, + ProducerRecord +} +import org.apache.kafka.common.serialization.{Serdes, Serializer} +import tech.units.indriya.quantity.Quantities + +import java.time.ZonedDateTime +import java.util.{Properties, UUID} +import scala.jdk.CollectionConverters._ +import scala.reflect.ClassTag + +final case class ResultEntityKafkaSink[ + V <: ResultEntity, + P <: ResultPlain +] private ( + producer: KafkaProducer[String, P], + plainWriter: PlainWriter[V, P], + topic: String +) extends ResultEntitySink { + + override def handleResultEntity(resultEntity: ResultEntity): Unit = { + val plainEntity = plainWriter.writePlain(resultEntity.asInstanceOf[V]) + producer.send( + new ProducerRecord[String, P](topic, plainEntity) + ) + } + + override def close(): Unit = { + producer.flush() + producer.close() + } +} + +object ResultEntityKafkaSink { + + def apply[R]( + topic: String, + runId: UUID, + bootstrapServers: String, + schemaRegistryUrl: String, + linger: Int + )(implicit + tag: ClassTag[R] + ): ResultEntityKafkaSink[_ <: ResultEntity, _ <: ResultPlain] = { + val props = new Properties() + props.put(ProducerConfig.LINGER_MS_CONFIG, linger) + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers) + props.put( + ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, + true + ) // exactly once delivery + + val NodeResClass = classOf[NodeResult] + + tag.runtimeClass match { + case NodeResClass => + implicit val recordFormat: RecordFormat[NodeResultPlain] = + RecordFormat[NodeResultPlain] + createSink(schemaRegistryUrl, props, topic, NodeResultWriter(runId)) + } + } + + private def createSink[F <: ResultEntity, P <: ResultPlain: RecordFormat]( + schemaRegistryUrl: String, + props: Properties, + topic: String, + writer: PlainWriter[F, P] + ): ResultEntityKafkaSink[F, P] = { + val keySerializer = Serdes.String().serializer() + val valueSerializer: Serializer[P] = reflectionSerializer4S[P] + + valueSerializer.configure( + Map(SCHEMA_REGISTRY_URL_CONFIG -> schemaRegistryUrl).asJava, + false + ) + + ResultEntityKafkaSink( + new KafkaProducer[String, P]( + props, + keySerializer, + valueSerializer + ), + writer, + topic + ) + } + + trait PlainWriter[F <: ResultEntity, P <: ResultPlain] { + def writePlain(full: F): P + + def createFull(plain: P): F + } + + case class NodeResultWriter(runId: UUID) + extends PlainWriter[NodeResult, NodeResultPlain] { + override def writePlain(full: NodeResult): NodeResultPlain = + NodeResultPlain( + runId, + full.getTime.toString, + full.getUuid, + full.getvMag().getValue.doubleValue(), + full.getvAng().getValue.doubleValue() + ) + + override def createFull(plain: NodeResultPlain): NodeResult = { + new NodeResult( + ZonedDateTime.parse(plain.dateTime), + plain.uuid, + Quantities.getQuantity(plain.vMagPU, PowerSystemUnits.PU), + 
Quantities.getQuantity(plain.vAngDeg, PowerSystemUnits.DEGREE_GEOM) + ) + } + } +} diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala index 47c6e953ba..607c3025eb 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala @@ -8,6 +8,8 @@ package edu.ie3.simona.io.result import edu.ie3.simona.config.SimonaConfig +import java.util.UUID + /** Enumeration to describe all eligible types of * [[edu.ie3.datamodel.models.result.ResultEntity]] sink */ @@ -24,6 +26,14 @@ object ResultSinkType { final case class InfluxDb1x(url: String, database: String, scenario: String) extends ResultSinkType + final case class Kafka( + topicNodeRes: String, + runId: UUID, + bootstrapServers: String, + schemaRegistryUrl: String, + linger: Int + ) extends ResultSinkType + def apply( sinkConfig: SimonaConfig.Simona.Output.Sink, runName: String @@ -40,6 +50,14 @@ object ResultSinkType { Csv(params.fileFormat, params.filePrefix, params.fileSuffix) case Some(params: SimonaConfig.Simona.Output.Sink.InfluxDb1x) => InfluxDb1x(buildInfluxDb1xUrl(params), params.database, runName) + case Some(params: SimonaConfig.Simona.Output.Sink.Kafka) => + Kafka( + params.topicNodeRes, + UUID.fromString(params.runId), + params.bootstrapServers, + params.schemaRegistryUrl, + params.linger + ) case None => throw new IllegalArgumentException( s"No sinks defined! Cannot determine the sink type!" diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala new file mode 100644 index 0000000000..1be86a826a --- /dev/null +++ b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala @@ -0,0 +1,17 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.io.result.plain + +import java.util.UUID + +case class NodeResultPlain( + runId: UUID, + dateTime: String, + uuid: UUID, + vMagPU: Double, + vAngDeg: Double +) extends ResultPlain diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala new file mode 100644 index 0000000000..2e6a71e7e1 --- /dev/null +++ b/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala @@ -0,0 +1,9 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.io.result.plain + +trait ResultPlain {} diff --git a/src/main/scala/edu/ie3/util/scala/io/ScalaReflectionSerde.scala b/src/main/scala/edu/ie3/util/scala/io/ScalaReflectionSerde.scala new file mode 100644 index 0000000000..e129c8dff6 --- /dev/null +++ b/src/main/scala/edu/ie3/util/scala/io/ScalaReflectionSerde.scala @@ -0,0 +1,59 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.util.scala.io + +import com.sksamuel.avro4s.RecordFormat +import io.confluent.kafka.streams.serdes.avro.{ + GenericAvroDeserializer, + GenericAvroSerializer +} +import org.apache.kafka.common.serialization.{Deserializer, Serializer} + +/** As seen at + * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html + */ +object ScalaReflectionSerde { + + def reflectionSerializer4S[T: RecordFormat]: Serializer[T] = + new Serializer[T] { + val inner = new GenericAvroSerializer() + + override def configure( + configs: java.util.Map[String, _], + isKey: Boolean + ): Unit = inner.configure(configs, isKey) + + override def serialize(topic: String, maybeData: T): Array[Byte] = + Option(maybeData) + .map(data => + inner.serialize(topic, implicitly[RecordFormat[T]].to(data)) + ) + .getOrElse(Array.emptyByteArray) + + override def close(): Unit = inner.close() + } + + def reflectionDeserializer4S[T: RecordFormat]: Deserializer[T] = + new Deserializer[T] { + val inner = new GenericAvroDeserializer() + + override def configure( + configs: java.util.Map[String, _], + isKey: Boolean + ): Unit = inner.configure(configs, isKey) + + override def deserialize(topic: String, maybeData: Array[Byte]): T = + Option(maybeData) + .filter(_.nonEmpty) + .map(data => + implicitly[RecordFormat[T]].from(inner.deserialize(topic, data)) + ) + .getOrElse(null.asInstanceOf[T]) + + override def close(): Unit = inner.close() + } +} diff --git a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala index 805236776a..c34d157451 100644 --- a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala +++ b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala @@ -709,15 +709,15 @@ class ConfigFailFastSpec extends UnitSpec with ConfigTestData { "throw an exception if no sink is provided" in { intercept[InvalidConfigParameterException] { - ConfigFailFast invokePrivate checkDataSinks(Sink(None, None)) + ConfigFailFast invokePrivate checkDataSinks(Sink(None, None, None)) }.getLocalizedMessage shouldBe "No sink configuration found! Please ensure that at least " + - "one sink is configured! You can choose from: influxdb1x, csv." + "one sink is configured! You can choose from: influxdb1x, csv, kafka." } "throw an exception if more than one sink is provided" in { intercept[InvalidConfigParameterException] { ConfigFailFast invokePrivate checkDataSinks( - Sink(Some(Csv("", "", "")), Some(InfluxDb1x("", 0, ""))) + Sink(Some(Csv("", "", "")), Some(InfluxDb1x("", 0, "")), None) ) }.getLocalizedMessage shouldBe "Multiple sink configurations are not supported! Please ensure that only " + "one sink is configured!" @@ -726,7 +726,7 @@ class ConfigFailFastSpec extends UnitSpec with ConfigTestData { "throw an exception if an influxDb1x is configured, but not accessible" ignore { intercept[java.lang.IllegalArgumentException] { ConfigFailFast invokePrivate checkDataSinks( - Sink(None, Some(InfluxDb1x("", 0, ""))) + Sink(None, Some(InfluxDb1x("", 0, "")), None) ) }.getLocalizedMessage shouldBe "Unable to reach configured influxDb1x with url ':0' for 'Sink' configuration and database ''. 
" + "Exception: java.lang.IllegalArgumentException: Unable to parse url: :0" diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala new file mode 100644 index 0000000000..0388ed630d --- /dev/null +++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala @@ -0,0 +1,151 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.io.result + +import com.sksamuel.avro4s.RecordFormat +import edu.ie3.datamodel.models.result.NodeResult +import edu.ie3.simona.io.result.plain.NodeResultPlain +import edu.ie3.simona.test.KafkaFlatSpec +import edu.ie3.simona.test.KafkaFlatSpec.Topic +import edu.ie3.util.quantities.PowerSystemUnits +import edu.ie3.util.scala.io.ScalaReflectionSerde +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.common.TopicPartition +import org.apache.kafka.common.serialization.{Deserializer, Serdes} +import org.apache.kafka.common.utils.Bytes +import tech.units.indriya.quantity.Quantities + +import java.time.ZonedDateTime +import java.util.UUID +import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ +import scala.jdk.DurationConverters._ +import scala.language.postfixOps + +/** Adapted from + * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html + */ +class ResultEntityKafkaSpec extends KafkaFlatSpec { + + var testConsumer: KafkaConsumer[Bytes, NodeResultPlain] = _ + + private implicit lazy val resultFormat: RecordFormat[NodeResultPlain] = + RecordFormat[NodeResultPlain] + val deserializer: Deserializer[NodeResultPlain] = + ScalaReflectionSerde.reflectionDeserializer4S[NodeResultPlain] + + private val topic = "testtopic" + + override val testTopics: Vector[Topic] = Vector( + Topic(topic, 1, 1) + ) + + deserializer.configure( + Map(SCHEMA_REGISTRY_URL_CONFIG -> "mock://unused:8081").asJava, + false + ) + + override def beforeAll(): Unit = { + super.beforeAll() + val config = Map[String, AnyRef]( + "group.id" -> "test", + "bootstrap.servers" -> kafka.getBootstrapServers + ) + testConsumer = new KafkaConsumer[Bytes, NodeResultPlain]( + config.asJava, + Serdes.Bytes().deserializer(), + deserializer + ) + } + + "produce" should "write a series of new NodeResults to kafka" in { + + Given("a producer config") + + val mockSchemaRegistryUrl = "mock://unused:8081" + + val runId = UUID.randomUUID() + + val resultEntitySink = ResultEntityKafkaSink[NodeResult]( + topic, + runId, + kafka.getBootstrapServers, + mockSchemaRegistryUrl, + 0 + ) + + And("a collection of NodeResults") + val nodeRes1 = new NodeResult( + ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(1d, PowerSystemUnits.PU), + Quantities.getQuantity(0d, PowerSystemUnits.DEGREE_GEOM) + ) + val nodeRes2 = new NodeResult( + ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(0.8d, PowerSystemUnits.PU), + Quantities.getQuantity(15d, PowerSystemUnits.DEGREE_GEOM) + ) + val nodeRes3 = new NodeResult( + ZonedDateTime.parse("2021-01-10T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(0.75d, PowerSystemUnits.PU), + Quantities.getQuantity(90d, PowerSystemUnits.DEGREE_GEOM) + ) + 
+ When("producing the NodeResults") + resultEntitySink.handleResultEntity(nodeRes1) + resultEntitySink.handleResultEntity(nodeRes2) + resultEntitySink.handleResultEntity(nodeRes3) + + val topicPartitions: Seq[TopicPartition] = + (0 until testTopics.head.partitions) + .map(new TopicPartition(testTopics.head.name, _)) + + testConsumer.assign(topicPartitions.asJava) + + Then("records can be fetched from Kafka") + eventually(timeout(5 second), interval(1 second)) { + testConsumer.seekToBeginning(topicPartitions.asJava) + val records: List[NodeResultPlain] = + testConsumer.poll((1 second) toJava).asScala.map(_.value()).toList + + records should have length 3 + records should contain( + NodeResultPlain( + runId, + nodeRes1.getTime.toString, + nodeRes1.getUuid, + nodeRes1.getvMag().getValue.doubleValue(), + nodeRes1.getvAng().getValue.doubleValue() + ) + ) + records should contain( + NodeResultPlain( + runId, + nodeRes2.getTime.toString, + nodeRes2.getUuid, + nodeRes2.getvMag().getValue.doubleValue(), + nodeRes2.getvAng().getValue.doubleValue() + ) + ) + records should contain( + NodeResultPlain( + runId, + nodeRes3.getTime.toString, + nodeRes3.getUuid, + nodeRes3.getvMag().getValue.doubleValue(), + nodeRes3.getvAng().getValue.doubleValue() + ) + ) + } + + resultEntitySink.close() + } +} diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala index ca54c531c5..32baa40e8f 100644 --- a/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala +++ b/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala @@ -21,7 +21,8 @@ class ResultSinkTypeSpec extends UnitSpec { fileSuffix = "" ) ), - influxDb1x = None + influxDb1x = None, + kafka = None ) inside(ResultSinkType(conf, "testRun")) { @@ -43,7 +44,8 @@ class ResultSinkTypeSpec extends UnitSpec { port = 1, url = "localhost/" ) - ) + ), + kafka = None ) val runName = "testRun" @@ -57,6 +59,8 @@ class ResultSinkTypeSpec extends UnitSpec { } } + // TODO kafka sink + "fail when more than one sink is supplied" in { val conf = SimonaConfig.Simona.Output.Sink( csv = Some( @@ -72,7 +76,8 @@ class ResultSinkTypeSpec extends UnitSpec { port = 1, url = "localhost" ) - ) + ), + kafka = None ) assertThrows[IllegalArgumentException](ResultSinkType(conf, "testRun")) @@ -81,7 +86,8 @@ class ResultSinkTypeSpec extends UnitSpec { "fail when no sink is supplied" in { val conf = SimonaConfig.Simona.Output.Sink( csv = None, - influxDb1x = None + influxDb1x = None, + kafka = None ) assertThrows[IllegalArgumentException](ResultSinkType(conf, "testRun")) diff --git a/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala b/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala new file mode 100644 index 0000000000..d0d144e80a --- /dev/null +++ b/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala @@ -0,0 +1,69 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.test + +import edu.ie3.simona.test.KafkaFlatSpec.Topic +import org.apache.kafka.clients.admin.{Admin, NewTopic} +import org.junit.Rule +import org.scalatest.concurrent.Eventually +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Inspectors} +import org.testcontainers.containers.KafkaContainer +import org.testcontainers.utility.DockerImageName + +import scala.jdk.CollectionConverters._ + +/** Adapted from + * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html + */ +trait KafkaFlatSpec + extends AnyFlatSpec + with Matchers + with Inspectors + with BeforeAndAfterAll + with GivenWhenThen + with Eventually { + + val testTopics: Vector[Topic] + + @Rule + val kafka = new KafkaContainer( + DockerImageName.parse("confluentinc/cp-kafka:6.1.0") + ) + lazy val admin: Admin = Admin.create( + Map[String, AnyRef]("bootstrap.servers" -> kafka.getBootstrapServers).asJava + ) + + override def beforeAll(): Unit = { + super.beforeAll() + kafka.start() + admin.createTopics( + testTopics.map { topic => + new NewTopic( + topic.name, + topic.partitions, + topic.replicationFactor + ) + }.asJava + ) + } + + override def afterAll(): Unit = { + admin.close() + kafka.stop() + super.afterAll() + } +} + +object KafkaFlatSpec { + final case class Topic( + name: String, + partitions: Int, + replicationFactor: Short + ) +} From 677b7a4d128bf17ce16cc4a1ef52f6ae97139dae Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:07:15 +0100 Subject: [PATCH 03/58] include kafka sink config --- src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala index 607c3025eb..f74d450e90 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala @@ -38,7 +38,7 @@ object ResultSinkType { sinkConfig: SimonaConfig.Simona.Output.Sink, runName: String ): ResultSinkType = { - val sink: Seq[Any] = Seq(sinkConfig.csv, sinkConfig.influxDb1x).flatten + val sink: Seq[Any] = Seq(sinkConfig.csv, sinkConfig.influxDb1x, sinkConfig.kafka).flatten if (sink.size > 1) throw new IllegalArgumentException( From 3f6cea1f755f202884bb48c9036a8da0d5e8f59a Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:12:19 +0100 Subject: [PATCH 04/58] rename node result params --- .../edu/ie3/simona/io/result/ResultEntityKafkaSink.scala | 6 +++--- .../edu/ie3/simona/io/result/plain/NodeResultPlain.scala | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala index 09f5b8cece..e59ed3ad68 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -121,10 +121,10 @@ object ResultEntityKafkaSink { override def createFull(plain: NodeResultPlain): NodeResult = { new NodeResult( - ZonedDateTime.parse(plain.dateTime), + ZonedDateTime.parse(plain.time), 
plain.uuid, - Quantities.getQuantity(plain.vMagPU, PowerSystemUnits.PU), - Quantities.getQuantity(plain.vAngDeg, PowerSystemUnits.DEGREE_GEOM) + Quantities.getQuantity(plain.vMag, PowerSystemUnits.PU), + Quantities.getQuantity(plain.vAng, PowerSystemUnits.DEGREE_GEOM) ) } } diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala index 1be86a826a..10fe0d1152 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala @@ -9,9 +9,9 @@ package edu.ie3.simona.io.result.plain import java.util.UUID case class NodeResultPlain( - runId: UUID, - dateTime: String, + simRunId: UUID, + time: String, uuid: UUID, - vMagPU: Double, - vAngDeg: Double + vMag: Double, + vAng: Double ) extends ResultPlain From 50ca7a086dbca1c2ced7e1ee26416b0033c184b5 Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:13:42 +0100 Subject: [PATCH 05/58] include input model uuid --- .../scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala | 1 + .../scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala | 1 + 2 files changed, 2 insertions(+) diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala index e59ed3ad68..9853e0eba2 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -115,6 +115,7 @@ object ResultEntityKafkaSink { runId, full.getTime.toString, full.getUuid, + full.getInputModel, full.getvMag().getValue.doubleValue(), full.getvAng().getValue.doubleValue() ) diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala index 10fe0d1152..12d2205b0a 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala @@ -12,6 +12,7 @@ case class NodeResultPlain( simRunId: UUID, time: String, uuid: UUID, + inputModel: UUID, vMag: Double, vAng: Double ) extends ResultPlain From e721f2e9e6ba9dbbb00cf95f9a773bb0d9d89291 Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:18:36 +0100 Subject: [PATCH 06/58] use simple time stamp --- .../io/result/ResultEntityKafkaSink.scala | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala index 9853e0eba2..659926871e 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -13,15 +13,12 @@ import edu.ie3.simona.io.result.plain.{NodeResultPlain, ResultPlain} import edu.ie3.util.quantities.PowerSystemUnits import edu.ie3.util.scala.io.ScalaReflectionSerde.reflectionSerializer4S import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -import org.apache.kafka.clients.producer.{ - KafkaProducer, - ProducerConfig, - ProducerRecord -} +import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord} import org.apache.kafka.common.serialization.{Serdes, Serializer} import 
tech.units.indriya.quantity.Quantities import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter import java.util.{Properties, UUID} import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag @@ -106,19 +103,26 @@ object ResultEntityKafkaSink { def writePlain(full: F): P def createFull(plain: P): F + + def createSimpleTimeStamp(dateTime: ZonedDateTime): String = { + val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") + dateTime.format(formatter) + } } case class NodeResultWriter(runId: UUID) extends PlainWriter[NodeResult, NodeResultPlain] { - override def writePlain(full: NodeResult): NodeResultPlain = + override def writePlain(full: NodeResult): NodeResultPlain = { + NodeResultPlain( runId, - full.getTime.toString, + createSimpleTimeStamp(full.getTime), full.getUuid, full.getInputModel, - full.getvMag().getValue.doubleValue(), - full.getvAng().getValue.doubleValue() + full.getvMag.getValue.doubleValue(), + full.getvAng.getValue.doubleValue() ) + } override def createFull(plain: NodeResultPlain): NodeResult = { new NodeResult( From 18da5b95cae196fa6889280fa2ba6dfb40219ab3 Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:33:43 +0100 Subject: [PATCH 07/58] use sealed traits and move to separate file --- .../io/result/ResultEntityKafkaSink.scala | 54 +++---------------- .../io/result/plain/NodeResultPlain.scala | 18 ------- .../simona/io/result/plain/PlainResult.scala | 24 +++++++++ .../simona/io/result/plain/PlainWriter.scala | 48 +++++++++++++++++ .../simona/io/result/plain/ResultPlain.scala | 9 ---- .../io/result/ResultEntityKafkaSpec.scala | 16 +++--- 6 files changed, 88 insertions(+), 81 deletions(-) delete mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala create mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala create mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala delete mode 100644 src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala index 659926871e..9f44410e4e 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -8,24 +8,21 @@ package edu.ie3.simona.io.result import com.sksamuel.avro4s.RecordFormat import edu.ie3.datamodel.models.result.{NodeResult, ResultEntity} -import edu.ie3.simona.io.result.ResultEntityKafkaSink.PlainWriter -import edu.ie3.simona.io.result.plain.{NodeResultPlain, ResultPlain} -import edu.ie3.util.quantities.PowerSystemUnits +import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult +import edu.ie3.simona.io.result.plain.PlainWriter.NodeResultWriter +import edu.ie3.simona.io.result.plain.{PlainResult, PlainWriter} import edu.ie3.util.scala.io.ScalaReflectionSerde.reflectionSerializer4S import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord} import org.apache.kafka.common.serialization.{Serdes, Serializer} -import tech.units.indriya.quantity.Quantities -import java.time.ZonedDateTime -import java.time.format.DateTimeFormatter import java.util.{Properties, UUID} import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag final case class ResultEntityKafkaSink[ V <: 
ResultEntity, - P <: ResultPlain + P <: PlainResult ] private ( producer: KafkaProducer[String, P], plainWriter: PlainWriter[V, P], @@ -55,7 +52,7 @@ object ResultEntityKafkaSink { linger: Int )(implicit tag: ClassTag[R] - ): ResultEntityKafkaSink[_ <: ResultEntity, _ <: ResultPlain] = { + ): ResultEntityKafkaSink[_ <: ResultEntity, _ <: PlainResult] = { val props = new Properties() props.put(ProducerConfig.LINGER_MS_CONFIG, linger) props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers) @@ -68,13 +65,13 @@ object ResultEntityKafkaSink { tag.runtimeClass match { case NodeResClass => - implicit val recordFormat: RecordFormat[NodeResultPlain] = - RecordFormat[NodeResultPlain] + implicit val recordFormat: RecordFormat[PlainNodeResult] = + RecordFormat[PlainNodeResult] createSink(schemaRegistryUrl, props, topic, NodeResultWriter(runId)) } } - private def createSink[F <: ResultEntity, P <: ResultPlain: RecordFormat]( + private def createSink[F <: ResultEntity, P <: PlainResult: RecordFormat]( schemaRegistryUrl: String, props: Properties, topic: String, @@ -98,39 +95,4 @@ object ResultEntityKafkaSink { topic ) } - - trait PlainWriter[F <: ResultEntity, P <: ResultPlain] { - def writePlain(full: F): P - - def createFull(plain: P): F - - def createSimpleTimeStamp(dateTime: ZonedDateTime): String = { - val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") - dateTime.format(formatter) - } - } - - case class NodeResultWriter(runId: UUID) - extends PlainWriter[NodeResult, NodeResultPlain] { - override def writePlain(full: NodeResult): NodeResultPlain = { - - NodeResultPlain( - runId, - createSimpleTimeStamp(full.getTime), - full.getUuid, - full.getInputModel, - full.getvMag.getValue.doubleValue(), - full.getvAng.getValue.doubleValue() - ) - } - - override def createFull(plain: NodeResultPlain): NodeResult = { - new NodeResult( - ZonedDateTime.parse(plain.time), - plain.uuid, - Quantities.getQuantity(plain.vMag, PowerSystemUnits.PU), - Quantities.getQuantity(plain.vAng, PowerSystemUnits.DEGREE_GEOM) - ) - } - } } diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala deleted file mode 100644 index 12d2205b0a..0000000000 --- a/src/main/scala/edu/ie3/simona/io/result/plain/NodeResultPlain.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation - */ - -package edu.ie3.simona.io.result.plain - -import java.util.UUID - -case class NodeResultPlain( - simRunId: UUID, - time: String, - uuid: UUID, - inputModel: UUID, - vMag: Double, - vAng: Double -) extends ResultPlain diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala new file mode 100644 index 0000000000..613df98eb6 --- /dev/null +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala @@ -0,0 +1,24 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ + +package edu.ie3.simona.io.result.plain + +import java.util.UUID + +sealed trait PlainResult + +object PlainResult { + case class PlainNodeResult( + simRunId: UUID, + time: String, + uuid: UUID, + inputModel: UUID, + vMag: Double, + vAng: Double + ) extends PlainResult +} + + diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala new file mode 100644 index 0000000000..8f4df2b715 --- /dev/null +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala @@ -0,0 +1,48 @@ +package edu.ie3.simona.io.result.plain + +import edu.ie3.datamodel.models.result.{NodeResult, ResultEntity} +import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult +import edu.ie3.util.quantities.PowerSystemUnits +import tech.units.indriya.quantity.Quantities + +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.util.UUID + +sealed trait PlainWriter[F <: ResultEntity, P <: PlainResult] { + def writePlain(full: F): P + + def createFull(plain: P): F + + def createSimpleTimeStamp(dateTime: ZonedDateTime): String = { + val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") + dateTime.format(formatter) + } +} + +object PlainWriter{ + + case class NodeResultWriter(runId: UUID) + extends PlainWriter[NodeResult, PlainNodeResult] { + + override def writePlain(full: NodeResult): PlainNodeResult = { + PlainNodeResult( + runId, + createSimpleTimeStamp(full.getTime), + full.getUuid, + full.getInputModel, + full.getvMag.getValue.doubleValue(), + full.getvAng.getValue.doubleValue() + ) + } + + override def createFull(plain: PlainNodeResult): NodeResult = { + new NodeResult( + ZonedDateTime.parse(plain.time), + plain.uuid, + Quantities.getQuantity(plain.vMag, PowerSystemUnits.PU), + Quantities.getQuantity(plain.vAng, PowerSystemUnits.DEGREE_GEOM) + ) + } + } +} \ No newline at end of file diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala b/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala deleted file mode 100644 index 2e6a71e7e1..0000000000 --- a/src/main/scala/edu/ie3/simona/io/result/plain/ResultPlain.scala +++ /dev/null @@ -1,9 +0,0 @@ -/* - * © 2021. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation - */ - -package edu.ie3.simona.io.result.plain - -trait ResultPlain {} diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala index 0388ed630d..3e7a8f444e 100644 --- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala +++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala @@ -8,7 +8,7 @@ package edu.ie3.simona.io.result import com.sksamuel.avro4s.RecordFormat import edu.ie3.datamodel.models.result.NodeResult -import edu.ie3.simona.io.result.plain.NodeResultPlain +import edu.ie3.simona.io.result.plain.PlainNodeResult import edu.ie3.simona.test.KafkaFlatSpec import edu.ie3.simona.test.KafkaFlatSpec.Topic import edu.ie3.util.quantities.PowerSystemUnits @@ -32,12 +32,12 @@ import scala.language.postfixOps */ class ResultEntityKafkaSpec extends KafkaFlatSpec { - var testConsumer: KafkaConsumer[Bytes, NodeResultPlain] = _ + var testConsumer: KafkaConsumer[Bytes, PlainNodeResult] = _ - private implicit lazy val resultFormat: RecordFormat[NodeResultPlain] = - RecordFormat[NodeResultPlain] - val deserializer: Deserializer[NodeResultPlain] = - ScalaReflectionSerde.reflectionDeserializer4S[NodeResultPlain] + private implicit lazy val resultFormat: RecordFormat[PlainNodeResult] = + RecordFormat[PlainNodeResult] + val deserializer: Deserializer[PlainNodeResult] = + ScalaReflectionSerde.reflectionDeserializer4S[PlainNodeResult] private val topic = "testtopic" @@ -56,7 +56,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec { "group.id" -> "test", "bootstrap.servers" -> kafka.getBootstrapServers ) - testConsumer = new KafkaConsumer[Bytes, NodeResultPlain]( + testConsumer = new KafkaConsumer[Bytes, PlainNodeResult]( config.asJava, Serdes.Bytes().deserializer(), deserializer @@ -113,7 +113,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec { Then("records can be fetched from Kafka") eventually(timeout(5 second), interval(1 second)) { testConsumer.seekToBeginning(topicPartitions.asJava) - val records: List[NodeResultPlain] = + val records: List[PlainNodeResult] = testConsumer.poll((1 second) toJava).asScala.map(_.value()).toList records should have length 3 From 9b9f64f6da1a94d58559918446c267e037795f07 Mon Sep 17 00:00:00 2001 From: t-ober <63147366+t-ober@users.noreply.github.com> Date: Wed, 15 Dec 2021 14:37:48 +0100 Subject: [PATCH 08/58] fmt --- .../io/result/ResultEntityKafkaSink.scala | 6 +++++- .../ie3/simona/io/result/ResultSinkType.scala | 3 ++- .../simona/io/result/plain/PlainResult.scala | 18 ++++++++---------- .../simona/io/result/plain/PlainWriter.scala | 12 +++++++++--- 4 files changed, 24 insertions(+), 15 deletions(-) diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala index 9f44410e4e..02ec8249ee 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala @@ -13,7 +13,11 @@ import edu.ie3.simona.io.result.plain.PlainWriter.NodeResultWriter import edu.ie3.simona.io.result.plain.{PlainResult, PlainWriter} import edu.ie3.util.scala.io.ScalaReflectionSerde.reflectionSerializer4S import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -import 
org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord} +import org.apache.kafka.clients.producer.{ + KafkaProducer, + ProducerConfig, + ProducerRecord +} import org.apache.kafka.common.serialization.{Serdes, Serializer} import java.util.{Properties, UUID} diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala index f74d450e90..600fd6c8dd 100644 --- a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala +++ b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala @@ -38,7 +38,8 @@ object ResultSinkType { sinkConfig: SimonaConfig.Simona.Output.Sink, runName: String ): ResultSinkType = { - val sink: Seq[Any] = Seq(sinkConfig.csv, sinkConfig.influxDb1x, sinkConfig.kafka).flatten + val sink: Seq[Any] = + Seq(sinkConfig.csv, sinkConfig.influxDb1x, sinkConfig.kafka).flatten if (sink.size > 1) throw new IllegalArgumentException( diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala index 613df98eb6..9b8f0ac830 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala @@ -11,14 +11,12 @@ import java.util.UUID sealed trait PlainResult object PlainResult { - case class PlainNodeResult( - simRunId: UUID, - time: String, - uuid: UUID, - inputModel: UUID, - vMag: Double, - vAng: Double - ) extends PlainResult + case class PlainNodeResult( + simRunId: UUID, + time: String, + uuid: UUID, + inputModel: UUID, + vMag: Double, + vAng: Double + ) extends PlainResult } - - diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala index 8f4df2b715..d8a74e03db 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala @@ -1,3 +1,9 @@ +/* + * © 2021. 
TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
 package edu.ie3.simona.io.result.plain

 import edu.ie3.datamodel.models.result.{NodeResult, ResultEntity}
@@ -20,10 +26,10 @@ sealed trait PlainWriter[F <: ResultEntity, P <: PlainResult] {
   }
 }

-object PlainWriter{
+object PlainWriter {

   case class NodeResultWriter(runId: UUID)
-    extends PlainWriter[NodeResult, PlainNodeResult] {
+      extends PlainWriter[NodeResult, PlainNodeResult] {

     override def writePlain(full: NodeResult): PlainNodeResult = {
       PlainNodeResult(
@@ -45,4 +51,4 @@ object PlainWriter{
       )
     }
   }
-}
\ No newline at end of file
+}

From 3d52e96fb33f8853c49f40dac1c14a1dc8a61d64 Mon Sep 17 00:00:00 2001
From: t-ober <63147366+t-ober@users.noreply.github.com>
Date: Wed, 15 Dec 2021 14:48:34 +0100
Subject: [PATCH 09/58] update test

---
 .../ie3/simona/io/result/ResultEntityKafkaSpec.scala | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
index 3e7a8f444e..fd94aab2d2 100644
--- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
+++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
@@ -8,7 +8,7 @@ package edu.ie3.simona.io.result

 import com.sksamuel.avro4s.RecordFormat
 import edu.ie3.datamodel.models.result.NodeResult
-import edu.ie3.simona.io.result.plain.PlainNodeResult
+import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult
 import edu.ie3.simona.test.KafkaFlatSpec
 import edu.ie3.simona.test.KafkaFlatSpec.Topic
 import edu.ie3.util.quantities.PowerSystemUnits
@@ -118,28 +118,31 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec {

     records should have length 3
     records should contain(
-      NodeResultPlain(
+      PlainNodeResult(
         runId,
         nodeRes1.getTime.toString,
         nodeRes1.getUuid,
+        nodeRes1.getInputModel,
         nodeRes1.getvMag().getValue.doubleValue(),
         nodeRes1.getvAng().getValue.doubleValue()
       )
     )
     records should contain(
-      NodeResultPlain(
+      PlainNodeResult(
         runId,
         nodeRes2.getTime.toString,
         nodeRes2.getUuid,
+        nodeRes2.getInputModel,
         nodeRes2.getvMag().getValue.doubleValue(),
         nodeRes2.getvAng().getValue.doubleValue()
       )
     )
     records should contain(
-      NodeResultPlain(
+      PlainNodeResult(
         runId,
         nodeRes3.getTime.toString,
         nodeRes3.getUuid,
+        nodeRes3.getInputModel,
         nodeRes3.getvMag().getValue.doubleValue(),
         nodeRes3.getvAng().getValue.doubleValue()
       )

From 6fc64dfb9c81a8039be4e579740eae3e4bccba39 Mon Sep 17 00:00:00 2001
From: t-ober <63147366+t-ober@users.noreply.github.com>
Date: Wed, 15 Dec 2021 14:48:43 +0100
Subject: [PATCH 10/58] adjust naming

---
 .../edu/ie3/simona/io/result/ResultEntityKafkaSink.scala   | 4 ++--
 .../scala/edu/ie3/simona/io/result/plain/PlainWriter.scala | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala
index 02ec8249ee..b9d8f1f272 100644
--- a/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/ResultEntityKafkaSink.scala
@@ -50,7 +50,7 @@ object ResultEntityKafkaSink {

   def apply[R](
       topic: String,
-      runId: UUID,
+      simRunId: UUID,
       bootstrapServers: String,
       schemaRegistryUrl: String,
       linger: Int
@@ -71,7 +71,7 @@ object ResultEntityKafkaSink {
       case NodeResClass =>
         implicit val recordFormat: RecordFormat[PlainNodeResult] =
          RecordFormat[PlainNodeResult]
-        createSink(schemaRegistryUrl, props, topic, NodeResultWriter(runId))
+        createSink(schemaRegistryUrl, props, topic, NodeResultWriter(simRunId))
     }
   }

diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
index d8a74e03db..75f21cd180 100644
--- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
@@ -28,12 +28,12 @@ sealed trait PlainWriter[F <: ResultEntity, P <: PlainResult] {

 object PlainWriter {

-  case class NodeResultWriter(runId: UUID)
+  case class NodeResultWriter(simRunId: UUID)
       extends PlainWriter[NodeResult, PlainNodeResult] {

     override def writePlain(full: NodeResult): PlainNodeResult = {
       PlainNodeResult(
-        runId,
+        simRunId,
         createSimpleTimeStamp(full.getTime),
         full.getUuid,
         full.getInputModel,

From b76cc69dd1e2d12e4583c163e0d1e8eb7beb93ab Mon Sep 17 00:00:00 2001
From: t-ober <63147366+t-ober@users.noreply.github.com>
Date: Mon, 20 Dec 2021 12:23:39 +0100
Subject: [PATCH 11/58] adjust dockerfile to use env variable

---
 Dockerfile   |  8 +++++---
 build.gradle | 11 +++++++++++
 2 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 93b3e6aafc..9a24ce67d3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,19 +5,21 @@ FROM openjdk:8-jre-slim
   # e.g.: docker build --build-arg version=1.0 --build-arg snapshotSuffix=-SNAPSHOT -t simona .
 # run by mounting directory
   # e.g. docker run -v `realpath inputData`:/inputData --rm simona
+  # note: this does not work on Windows, so you have to enter the absolute path manually and escape the backslashes

 ARG version
 # snapshot suffix for jar files is "-SNAPSHOT"
 ARG snapshotSuffix=""

 ENV jarFile="simona-${version}${snapshotSuffix}-all.jar"
+ENV config=""

 RUN mkdir exec
-RUN mkdir inputData
+RUN mkdir input

 # copy simona fat jar into container
 COPY build/libs/$jarFile exec/
 # inputData is mounted upon running
-VOLUME /inputData
+VOLUME /input

-ENTRYPOINT ["sh", "-c", "java -cp exec/${jarFile} edu.ie3.simona.main.RunSimonaStandalone --config=inputData/vn_simona/vn_simona.conf"]
\ No newline at end of file
+ENTRYPOINT ["sh", "-c", "java -jar exec/${jarFile} --config=${config}"]
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index be5966f55c..a23eceb7af 100644
--- a/build.gradle
+++ b/build.gradle
@@ -143,6 +143,17 @@ tasks.withType(JavaCompile) {
 	options.encoding = 'UTF-8'
 }

+
+// 'Class-Path': configurations.compile.collect { it.getName() }.join(' '),
+
+jar {
+	manifest {
+		attributes(
+			'Main-Class': 'edu.ie3.simona.main.RunSimonaStandalone'
+		)
+	}
+}
+
 //////////////////////////////////////////////////////////////////////
 // Build akka'able fat jar using the gradle shadow plugin
 // see http://www.sureshpw.com/2015/10/building-akka-bundle-with-all.html

From 6bf11ea826385c242f28af43f1c5584207c12f38 Mon Sep 17 00:00:00 2001
From: Sebastian Peter <14994800+sebastian-peter@users.noreply.github.com>
Date: Mon, 10 Jan 2022 17:25:34 +0100
Subject: [PATCH 12/58] Updating kafka-related dependencies

---
 build.gradle | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/build.gradle b/build.gradle
index 2cd12b94fe..770a9d65de 100644
--- a/build.gradle
+++ b/build.gradle
@@ -131,8 +131,8 @@ dependencies {

   /* Kafka */
   implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0'
-  implementation "io.confluent:kafka-streams-avro-serde:5.5.0"
-  implementation "com.sksamuel.avro4s:avro4s-core_2.13:4.0.11"
+  implementation 'io.confluent:kafka-streams-avro-serde:6.2.2'
+  implementation 'com.sksamuel.avro4s:avro4s-core_2.13:4.0.12'

From 77ad35742dd5a46e031152c294903c20f2f066bf Mon Sep 17 00:00:00 2001
From: Sebastian Peter <14994800+sebastian-peter@users.noreply.github.com>
Date: Mon, 10 Jan 2022 17:27:11 +0100
Subject: [PATCH 13/58] fmt

---
 build.gradle | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/build.gradle b/build.gradle
index 770a9d65de..134510dfd1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -156,8 +156,8 @@ tasks.withType(JavaCompile) {
 jar {
 	manifest {
 		attributes(
-			'Main-Class': 'edu.ie3.simona.main.RunSimonaStandalone'
-		)
+				'Main-Class': 'edu.ie3.simona.main.RunSimonaStandalone'
+				)
 	}
 }

From ffe0a4fd90ee867cb9c3b58a9cf4e57ee14e7ea1 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Fri, 22 Apr 2022 10:41:56 +0200
Subject: [PATCH 14/58] Fixing kafka test

---
 .../ie3/simona/io/result/plain/PlainWriter.scala   | 10 +++++-----
 .../simona/io/result/ResultEntityKafkaSpec.scala   | 14 ++++++++------
 2 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
index 75f21cd180..d2d15582f7 100644
--- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
@@ -19,14 +19,11 @@ sealed trait PlainWriter[F <: ResultEntity, P <: PlainResult] {
   def writePlain(full: F): P

   def createFull(plain: P): F
-
-  def createSimpleTimeStamp(dateTime: ZonedDateTime): String = {
-    val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
-    dateTime.format(formatter)
-  }
 }

 object PlainWriter {
+  private lazy val timeFormatter =
+    DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

   case class NodeResultWriter(simRunId: UUID)
       extends PlainWriter[NodeResult, PlainNodeResult] {
@@ -51,4 +48,7 @@ object PlainWriter {
       )
     }
   }
+
+  def createSimpleTimeStamp(dateTime: ZonedDateTime): String =
+    dateTime.format(timeFormatter)
 }
diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
index fd94aab2d2..bb1736a691 100644
--- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
+++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
@@ -9,6 +9,7 @@ package edu.ie3.simona.io.result
 import com.sksamuel.avro4s.RecordFormat
 import edu.ie3.datamodel.models.result.NodeResult
 import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult
+import edu.ie3.simona.io.result.plain.PlainWriter
 import edu.ie3.simona.test.KafkaFlatSpec
 import edu.ie3.simona.test.KafkaFlatSpec.Topic
 import edu.ie3.util.quantities.PowerSystemUnits
@@ -18,6 +19,7 @@ import org.apache.kafka.clients.consumer.KafkaConsumer
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.serialization.{Deserializer, Serdes}
 import org.apache.kafka.common.utils.Bytes
+import org.scalatest.OptionValues
 import tech.units.indriya.quantity.Quantities

 import java.time.ZonedDateTime
@@ -30,7 +32,7 @@ import scala.language.postfixOps
 /** Adapted from
   * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html
   */
-class ResultEntityKafkaSpec extends KafkaFlatSpec {
+class ResultEntityKafkaSpec extends KafkaFlatSpec with OptionValues {

   var testConsumer: KafkaConsumer[Bytes, PlainNodeResult] = _

@@ -104,9 +106,9 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec {
     resultEntitySink.handleResultEntity(nodeRes2)
     resultEntitySink.handleResultEntity(nodeRes3)

+    val testTopic = testTopics.headOption.value
     val topicPartitions: Seq[TopicPartition] =
-      (0 until testTopics.head.partitions)
-        .map(new TopicPartition(testTopics.head.name, _))
+      (0 until testTopic.partitions).map(new TopicPartition(testTopic.name, _))

     testConsumer.assign(topicPartitions.asJava)

@@ -120,7 +122,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec {
     records should contain(
       PlainNodeResult(
         runId,
-        nodeRes1.getTime.toString,
+        PlainWriter.createSimpleTimeStamp(nodeRes1.getTime),
         nodeRes1.getUuid,
         nodeRes1.getInputModel,
         nodeRes1.getvMag().getValue.doubleValue(),
@@ -130,7 +132,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec {
     records should contain(
       PlainNodeResult(
         runId,
-        nodeRes2.getTime.toString,
+        PlainWriter.createSimpleTimeStamp(nodeRes2.getTime),
         nodeRes2.getUuid,
         nodeRes2.getInputModel,
         nodeRes2.getvMag().getValue.doubleValue(),
@@ -140,7 +142,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec {
     records should contain(
       PlainNodeResult(
         runId,
-        nodeRes3.getTime.toString,
+        PlainWriter.createSimpleTimeStamp(nodeRes3.getTime),
         nodeRes3.getUuid,
         nodeRes3.getInputModel,
         nodeRes3.getvMag().getValue.doubleValue(),

From 52b1af664b5e447b3d2cdbc21b650856bab1e846 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Fri, 22 Apr 2022 17:04:12 +0200
Subject: [PATCH 15/58] Adapting Dockerfile to recent changes

---
 Dockerfile | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 9a24ce67d3..d7a42fac6d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,10 +1,10 @@
-FROM openjdk:8-jre-slim
+FROM openjdk:17-slim

 # USAGE:
 # build with ARG version and if applicable with ARG snapshot suffix
-  # e.g.: docker build --build-arg version=1.0 --build-arg snapshotSuffix=-SNAPSHOT -t simona .
+  # e.g.: docker build --build-arg version=2.1.0 --build-arg snapshotSuffix=-SNAPSHOT -t simona .
 # run by mounting directory
-  # e.g. docker run -v `realpath inputData`:/inputData --rm simona
+  # e.g. docker run -v `realpath input`:/input --rm simona
   # note: this does not work on Windows, so you have to enter the absolute path manually and escape the backslashes

 ARG version

From 5a74143f549e0e602421fc71d24bc9e2c7d1e94a Mon Sep 17 00:00:00 2001
From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com>
Date: Fri, 20 May 2022 15:08:45 +0200
Subject: [PATCH 16/58] Update Changelog

---
 CHANGELOG.md | 23 ++++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 60ce8d035f..7987ddf5ee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 - Implement SQL source for primary data [#34](https://github.com/ie3-institute/simona/issues/34), [#101](https://github.com/ie3-institute/simona/issues/101)
+- Relevant scientific papers have been added to the documentation [#139](https://github.com/ie3-institute/simona/issues/139)
+- Add troubleshooting section to Users guide [#160](https://github.com/ie3-institute/simona/issues/160)
+- Adapt documentation to changed simonaAPI [#191](https://github.com/ie3-institute/simona/issues/191)
+- Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
+

 ### Changed
 - Re-organizing test resources into their respective packages [#105](https://github.com/ie3-institute/simona/issues/105)
@@ -15,14 +20,30 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Simplified PrimaryServiceProxy due to changes in PSDM [#120](https://github.com/ie3-institute/simona/issues/120)
 - Improved handling of weights and their sum in determination of weather data [#173](https://github.com/ie3-institute/simona/issues/173)
 - Improving code readability in EvcsAgent by moving FreeLotsRequest to separate methods [#19](https://github.com/ie3-institute/simona/issues/19)
+- Ignore dependabot snapshot dependencies [#27](https://github.com/ie3-institute/simona/issues/27)
 - Sending termination message to external simulation on expected and unexpected shutdowns of SIMONA [#35](https://github.com/ie3-institute/simona/issues/35)
+- Change transformer calculation since changes in PSDM [#99](https://github.com/ie3-institute/simona/issues/99)
+- Adapt to changed PvInputModel of PSDM (elevationAngle) [#100](https://github.com/ie3-institute/simona/issues/100)
+- Re-organizing test resources to packages [#105](https://github.com/ie3-institute/simona/issues/105)
+- Consolidate csv parameterization in config [#149](https://github.com/ie3-institute/simona/issues/149)
+- Change weather scheme to COSMO [PR#154](https://github.com/ie3-institute/simona/pull/154)
 - Improved implementation of `RefSystemParser` [#212](https://github.com/ie3-institute/simona/issues/212)
-- Removed Gradle task puml2png (Converting Plantuml to png / svg files) since it is no longer needed [#230](https://github.com/ie3-institute/simona/issues/230)
 - Harmonized configuration of csv parameters [#149](https://github.com/ie3-institute/simona/issues/149)
+

 ### Fixed
 - Location of `vn_simona` test grid (was partially in Berlin and Dortmund)
 - Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation)
 - Fix default resolution of weather source wrapper [#78](https://github.com/ie3-institute/simona/issues/78)
+- Fix invalid thread allocation in GridAgent [#111](https://github.com/ie3-institute/simona/issues/111)
+- Fixed config auto-generation [#131](https://github.com/ie3-institute/simona/pull/131)
+- Fixed genConfigSample gradle task[#148(https://github.com/ie3-institute/simona/issues/148)
+- Fixed some unreachable code [#167](https://github.com/ie3-institute/simona/issues/167)
+- Fix treatment of non-InitializeTrigger triggers in initialization within SimScheduler [#237](https://github.com/ie3-institute/simona/issues/237)
+
+### Removed
+- Remove workaround for tscfg tmp directory [#178](https://github.com/ie3-institute/simona/issues/178)
+- Removed Gradle task puml2png (Converting Plantuml to png / svg files) since it is no longer needed [#228](https://github.com/ie3-institute/simona/issues/228)
+- Remove RocketChat notification from Jenkinsfile [#234](https://github.com/ie3-institute/simona/issues/234)

 [Unreleased]: https://github.com/ie3-institute/simona/compare/a14a093239f58fca9b2b974712686b33e5e5f939...HEAD

From 6f51841aea54b1ac5e1aa9facf7e14e876f8ba40 Mon Sep 17 00:00:00 2001
From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com>
Date: Mon, 23 May 2022 09:16:06 +0200
Subject: [PATCH 17/58] Integrate reviewers' comments

---
 CHANGELOG.md | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7987ddf5ee..71744bce6c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,8 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Implement SQL source for primary data [#34](https://github.com/ie3-institute/simona/issues/34), [#101](https://github.com/ie3-institute/simona/issues/101)
 - Relevant scientific papers have been added to the documentation [#139](https://github.com/ie3-institute/simona/issues/139)
 - Add troubleshooting section to Users guide [#160](https://github.com/ie3-institute/simona/issues/160)
-- Adapt documentation to changed simonaAPI [#191](https://github.com/ie3-institute/simona/issues/191)
-- Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
+

 ### Changed
@@ -24,20 +23,22 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Sending termination message to external simulation on expected and unexpected shutdowns of SIMONA [#35](https://github.com/ie3-institute/simona/issues/35)
 - Change transformer calculation since changes in PSDM [#99](https://github.com/ie3-institute/simona/issues/99)
 - Adapt to changed PvInputModel of PSDM (elevationAngle) [#100](https://github.com/ie3-institute/simona/issues/100)
-- Re-organizing test resources to packages [#105](https://github.com/ie3-institute/simona/issues/105)
 - Consolidate csv parameterization in config [#149](https://github.com/ie3-institute/simona/issues/149)
 - Change weather scheme to COSMO [PR#154](https://github.com/ie3-institute/simona/pull/154)
+- Adapt documentation to changed simonaAPI [#191](https://github.com/ie3-institute/simona/issues/191)
+- Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
 - Improved implementation of `RefSystemParser` [#212](https://github.com/ie3-institute/simona/issues/212)
 - Harmonized configuration of csv parameters [#149](https://github.com/ie3-institute/simona/issues/149)

 ### Fixed
 - Location of `vn_simona` test grid (was partially in Berlin and Dortmund)
 - Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation)
+- Fix location of "vn_simona" test grid [#72](https://github.com/ie3-institute/simona/issues/72)
+- Participant agents die on failed registration [#76](https://github.com/ie3-institute/simona/issues/76)
 - Fix default resolution of weather source wrapper [#78](https://github.com/ie3-institute/simona/issues/78)
 - Fix invalid thread allocation in GridAgent [#111](https://github.com/ie3-institute/simona/issues/111)
 - Fixed config auto-generation [#131](https://github.com/ie3-institute/simona/pull/131)
-- Fixed genConfigSample gradle task[#148(https://github.com/ie3-institute/simona/issues/148)
+- Fixed genConfigSample gradle task[#148](https://github.com/ie3-institute/simona/issues/148)
 - Fixed some unreachable code [#167](https://github.com/ie3-institute/simona/issues/167)
 - Fix treatment of non-InitializeTrigger triggers in initialization within SimScheduler [#237](https://github.com/ie3-institute/simona/issues/237)

From 08d3a9ec7079a561917ec5bc3d99bf5c09f449ce Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 23 May 2022 09:17:13 +0200
Subject: [PATCH 18/58] Bump scala-logging_2.13 from 3.9.4 to 3.9.5 (#241)

Bumps [scala-logging_2.13](https://github.com/lightbend/scala-logging) from 3.9.4 to 3.9.5.
- [Release notes](https://github.com/lightbend/scala-logging/releases)
- [Commits](https://github.com/lightbend/scala-logging/compare/v3.9.4...v3.9.5)

---
updated-dependencies:
- dependency-name: com.typesafe.scala-logging:scala-logging_2.13
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 35d0895633..a8e6b5904d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -97,7 +97,7 @@ dependencies {
   }

   /* logging */
-  implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:3.9.4" // akka scala logging
+  implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:3.9.5" // akka scala logging
   implementation "ch.qos.logback:logback-classic:1.2.11"

   /* testing */

From 2a55ee3a24029da81325c72b5253554ec8b4fdb3 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 12:28:53 +0200
Subject: [PATCH 19/58] Fixing some code smells

---
 .../edu/ie3/simona/event/listener/ResultEventListener.scala | 4 ++--
 .../scala/edu/ie3/simona/io/result/plain/PlainResult.scala  | 2 +-
 .../scala/edu/ie3/simona/io/result/plain/PlainWriter.scala  | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala
index 611071e998..5054bca294 100644
--- a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala
+++ b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala
@@ -151,8 +151,8 @@ object ResultEventListener extends Transformer3wResultSupport {
           linger
         ) =>
       val clzs: Iterable[Class[_ <: ResultEntity]] = Set(
-        classOf[NodeResult]
-      ) // TODO add classOf[LineResult]
+        classOf[NodeResult] // currently, only NodeResults are sent out
+      )
       clzs.map(clz =>
         Future.successful(
           (
diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala
index 9b8f0ac830..f9bd3489b0 100644
--- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala
@@ -11,7 +11,7 @@ import java.util.UUID
 sealed trait PlainResult

 object PlainResult {
-  case class PlainNodeResult(
+  final case class PlainNodeResult(
       simRunId: UUID,
       time: String,
       uuid: UUID,
diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
index d2d15582f7..d9b5ca1d67 100644
--- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala
@@ -25,7 +25,7 @@ object PlainWriter {
   private lazy val timeFormatter =
     DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

-  case class NodeResultWriter(simRunId: UUID)
+  final case class NodeResultWriter(simRunId: UUID)
       extends PlainWriter[NodeResult, PlainNodeResult] {

     override def writePlain(full: NodeResult): PlainNodeResult = {

From 0b4a94f066e56d381e6de094455d004bd8b11abc Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 14:50:02 +0200
Subject: [PATCH 20/58] Improving Dockerfile

---
 Dockerfile | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index d7a42fac6d..0de30978b8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,11 +14,11 @@ ARG snapshotSuffix=""
 ENV jarFile="simona-${version}${snapshotSuffix}-all.jar"
 ENV config=""

-RUN mkdir exec
-RUN mkdir input
+RUN mkdir exec \
+    && mkdir input

 # copy simona fat jar into container
-COPY build/libs/$jarFile exec/
+COPY build/libs/$jarFile /exec/
 # inputData is mounted upon running
 VOLUME /input

From 6b125af91232346b654f1c905a83ff9f3a1dcb2e Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 15:01:55 +0200
Subject: [PATCH 21/58] Upgrading kafka-streams-avro-serde

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 3e109ae9aa..2822eb1f12 100644
--- a/build.gradle
+++ b/build.gradle
@@ -144,7 +144,7 @@ dependencies {

   /* Kafka */
   implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0'
-  implementation 'io.confluent:kafka-streams-avro-serde:6.2.2'
+  implementation 'io.confluent:kafka-streams-avro-serde:6.2.4'
   implementation 'com.sksamuel.avro4s:avro4s-core_2.13:4.0.12'

From 6d11823d3f1d34640db1aa4bec83c5a042943dba Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 15:05:49 +0200
Subject: [PATCH 22/58] Upgrading avro4s:avro4s-core

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 2822eb1f12..d2f7b68dbf 100644
--- a/build.gradle
+++ b/build.gradle
@@ -145,7 +145,7 @@ dependencies {
   /* Kafka */
   implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0'
   implementation 'io.confluent:kafka-streams-avro-serde:6.2.4'
-  implementation 'com.sksamuel.avro4s:avro4s-core_2.13:4.0.12'
+  implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.0.13"

From 620bbe4da2da63e1cbb802b15359be3446d7180d Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 15:20:57 +0200
Subject: [PATCH 23/58] Switching to testcontainers-scala for kafka testcontainers

---
 build.gradle                                            |  2 +-
 .../ie3/simona/io/result/ResultEntityKafkaSpec.scala    |  4 ++--
 src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala  | 10 +++++-----
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/build.gradle b/build.gradle
index d2f7b68dbf..f48d859e8e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -113,6 +113,7 @@ dependencies {
   // testcontainers
   testImplementation "com.dimafeng:testcontainers-scala-scalatest_${scalaVersion}:${testContainerVersion}"
   testImplementation "com.dimafeng:testcontainers-scala-postgresql_${scalaVersion}:${testContainerVersion}"
+  testImplementation "com.dimafeng:testcontainers-scala-kafka_${scalaVersion}:${testContainerVersion}"

   /* --- Scala libs --- */
   /* CORE Scala */
@@ -146,7 +147,6 @@ dependencies {
   implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0'
   implementation 'io.confluent:kafka-streams-avro-serde:6.2.4'
   implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.0.13"
-  testImplementation 'org.testcontainers:kafka:1.16.2' // kafka testing

   implementation 'org.apache.commons:commons-math3:3.6.1' // apache commons math3
   implementation 'org.apache.poi:poi-ooxml:5.2.2' // used for FilenameUtils
diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
index bb1736a691..d602890016 100644
--- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
+++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala
@@ -56,7 +56,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec with OptionValues {
     super.beforeAll()
     val config = Map[String, AnyRef](
       "group.id" -> "test",
-      "bootstrap.servers" -> kafka.getBootstrapServers
+      "bootstrap.servers" -> kafka.bootstrapServers
     )
     testConsumer = new KafkaConsumer[Bytes, PlainNodeResult](
       config.asJava,
@@ -76,7 +76,7 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec with OptionValues {
     val resultEntitySink = ResultEntityKafkaSink[NodeResult](
       topic,
       runId,
-      kafka.getBootstrapServers,
+      kafka.bootstrapServers,
       mockSchemaRegistryUrl,
       0
     )
diff --git a/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala b/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala
index d0d144e80a..014aee4c30 100644
--- a/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala
+++ b/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala
@@ -6,6 +6,7 @@

 package edu.ie3.simona.test

+import com.dimafeng.testcontainers.KafkaContainer
 import edu.ie3.simona.test.KafkaFlatSpec.Topic
 import org.apache.kafka.clients.admin.{Admin, NewTopic}
 import org.junit.Rule
@@ -13,7 +14,6 @@ import org.scalatest.concurrent.Eventually
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Inspectors}
-import org.testcontainers.containers.KafkaContainer
 import org.testcontainers.utility.DockerImageName

 import scala.jdk.CollectionConverters._
@@ -29,14 +29,14 @@ trait KafkaFlatSpec
     with GivenWhenThen
     with Eventually {

-  val testTopics: Vector[Topic]
+  protected val testTopics: Vector[Topic]

   @Rule
-  val kafka = new KafkaContainer(
+  protected val kafka: KafkaContainer = KafkaContainer(
     DockerImageName.parse("confluentinc/cp-kafka:6.1.0")
   )

-  lazy val admin: Admin = Admin.create(
-    Map[String, AnyRef]("bootstrap.servers" -> kafka.getBootstrapServers).asJava
+  protected lazy val admin: Admin = Admin.create(
+    Map[String, AnyRef]("bootstrap.servers" -> kafka.bootstrapServers).asJava
   )

   override def beforeAll(): Unit = {

From 00017a932f4acfb82ba426bdefafc37bbdc5568c Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 15:47:22 +0200
Subject: [PATCH 24/58] Upgrading kafka-clients

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index f48d859e8e..c7c4b12ffb 100644
--- a/build.gradle
+++ b/build.gradle
@@ -144,7 +144,7 @@ dependencies {
   scalaCompilerPlugin "com.sksamuel.scapegoat:scalac-scapegoat-plugin_${scalaBinaryVersion}:${scapegoatVersion}"

   /* Kafka */
-  implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.0.0'
+  implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.2.0'
   implementation 'io.confluent:kafka-streams-avro-serde:6.2.4'
   implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.0.13"

From 00fa31886cb2f5a52187cd9f3e9baa0ca4235ed2 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 16:50:40 +0200
Subject: [PATCH 25/58] Removing dead code

---
 build.gradle | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/build.gradle b/build.gradle
index c7c4b12ffb..23a21ada78 100644
--- a/build.gradle
+++ b/build.gradle
@@ -163,9 +163,6 @@ tasks.withType(JavaCompile) {
 	options.encoding = 'UTF-8'
 }

-
-// 'Class-Path': configurations.compile.collect { it.getName() }.join(' '),
-
 jar {
 	manifest {
 		attributes(

From 0c49b1023012c2f43ca1e8d9ff82dabff5757203 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 23 May 2022 17:17:11 +0200
Subject: [PATCH 26/58] Remove comment

---
 src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
index 831c970102..1e646d7ddc 100644
--- a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
+++ b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
@@ -174,7 +174,6 @@ case object ConfigFailFast extends LazyLogging {

     sinkConfigs.find(_.isDefined) match {
       case Some(Some(influxDb1x: InfluxDb1x)) =>
-        // if this is db sink, check the connection
         checkInfluxDb1xParams(
           "Sink",
           ResultSinkType.buildInfluxDb1xUrl(influxDb1x),

From f7a1a2208ec86deab75775ac97e2521dc0747994 Mon Sep 17 00:00:00 2001
From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com>
Date: Tue, 24 May 2022 09:10:31 +0200
Subject: [PATCH 27/58] Integrate reviewers' comments and include issue 156

---
 CHANGELOG.md | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 71744bce6c..870ee4c22d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -29,12 +29,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
 - Improved implementation of `RefSystemParser` [#212](https://github.com/ie3-institute/simona/issues/212)
 - Harmonized configuration of csv parameters [#149](https://github.com/ie3-institute/simona/issues/149)
+- Changing the export method for diagrams [#156](https://github.com/ie3-institute/simona/issues/156)

 ### Fixed
-- Location of `vn_simona` test grid (was partially in Berlin and Dortmund)
-- Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation)
-- Fix location of "vn_simona" test grid [#72](https://github.com/ie3-institute/simona/issues/72)
-- Participant agents die on failed registration [#76](https://github.com/ie3-institute/simona/issues/76)
+- Location of `vn_simona` test grid (was partially in Berlin and Dortmund) [#72](https://github.com/ie3-institute/simona/issues/72)
+- Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation) [#76](https://github.com/ie3-institute/simona/issues/76)
 - Fix default resolution of weather source wrapper [#78](https://github.com/ie3-institute/simona/issues/78)
 - Fix invalid thread allocation in GridAgent [#111](https://github.com/ie3-institute/simona/issues/111)
 - Fixed config auto-generation [#131](https://github.com/ie3-institute/simona/pull/131)
 - Fixed genConfigSample gradle task[#148](https://github.com/ie3-institute/simona/issues/148)
 - Fixed some unreachable code [#167](https://github.com/ie3-institute/simona/issues/167)
 - Fix treatment of non-InitializeTrigger triggers in initialization within SimScheduler [#237](https://github.com/ie3-institute/simona/issues/237)

From f56fdd3ba7f966dcd161b04123734cd96895cc7d Mon Sep 17 00:00:00 2001
From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com>
Date: Tue, 24 May 2022 10:12:20 +0200
Subject: [PATCH 28/58] Add documentation-related issues to changelog

---
 CHANGELOG.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 870ee4c22d..578d2b50d8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -29,7 +29,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
 - Improved implementation of `RefSystemParser` [#212](https://github.com/ie3-institute/simona/issues/212)
 - Harmonized configuration of csv parameters [#149](https://github.com/ie3-institute/simona/issues/149)
+- Include missing images into Documentation [#151](https://github.com/ie3-institute/simona/issues/151)
 - Changing the export method for diagrams [#156](https://github.com/ie3-institute/simona/issues/156)
+- Change references implementation in Documentation to bibtex [#174](https://github.com/ie3-institute/simona/issues/174)
+- Update Model descriptions (Documentation) [#122](https://github.com/ie3-institute/simona/issues/122)
+- Changes of Getting Started Section (Documentation) [#124](https://github.com/ie3-institute/simona/issues/124)
+- Update gradle [#176](https://github.com/ie3-institute/simona/issues/176)

From 899d5e073514e822e5d4d46f9a47e997b98c8d83 Mon Sep 17 00:00:00 2001
From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com>
Date: Tue, 24 May 2022 15:01:53 +0200
Subject: [PATCH 29/58] add more changes

---
 CHANGELOG.md | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 578d2b50d8..2a5c3b9d75 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,20 +28,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Adapt documentation to changed simonaAPI [#191](https://github.com/ie3-institute/simona/issues/191)
 - Implementing a new plugin framework for external simulations [#195](https://github.com/ie3-institute/simona/issues/195)
 - Improved implementation of `RefSystemParser` [#212](https://github.com/ie3-institute/simona/issues/212)
-- Harmonized configuration of csv parameters [#149](https://github.com/ie3-institute/simona/issues/149)
 - Include missing images into Documentation [#151](https://github.com/ie3-institute/simona/issues/151)
 - Changing the export method for diagrams [#156](https://github.com/ie3-institute/simona/issues/156)
 - Change references implementation in Documentation to bibtex [#174](https://github.com/ie3-institute/simona/issues/174)
 - Update Model descriptions (Documentation) [#122](https://github.com/ie3-institute/simona/issues/122)
 - Changes of Getting Started Section (Documentation) [#124](https://github.com/ie3-institute/simona/issues/124)
 - Update gradle [#176](https://github.com/ie3-institute/simona/issues/176)
+- Setting java version to 17 [#58](https://github.com/ie3-institute/simona/issues/58)
+- Made SimonaConfig.BaseRuntimeConfig serializable [#36](https://github.com/ie3-institute/simona/issues/36)
+- Adapt to new simonaAPI snapshot [#95](https://github.com/ie3-institute/simona/issues/95)
+- Update Sphinx to 4.5.0 as well as extensions [#214](https://github.com/ie3-institute/simona/issues/214)

 ### Fixed
 - Location of `vn_simona` test grid (was partially in Berlin and Dortmund) [#72](https://github.com/ie3-institute/simona/issues/72)
 - Let `ParticipantAgent` die after failed registration with secondary services (prevents stuck simulation) [#76](https://github.com/ie3-institute/simona/issues/76)
 - Fix default resolution of weather source wrapper [#78](https://github.com/ie3-institute/simona/issues/78)
 - Fix invalid thread allocation in GridAgent [#111](https://github.com/ie3-institute/simona/issues/111)
-- Fixed config auto-generation [#131](https://github.com/ie3-institute/simona/pull/131)
+- Fixed config auto-generation [#130](https://github.com/ie3-institute/simona/issues/130)
 - Fixed genConfigSample gradle task[#148](https://github.com/ie3-institute/simona/issues/148)
 - Fixed some unreachable code [#167](https://github.com/ie3-institute/simona/issues/167)
 - Fix treatment of non-InitializeTrigger triggers in initialization within SimScheduler [#237](https://github.com/ie3-institute/simona/issues/237)

From b90c800cd5e14cc378b2ae696db602b25b0e11ce Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Fri, 27 May 2022 12:06:42 +0200
Subject: [PATCH 30/58] Updating PowerSystemUtils to 2.0-SNAPSHOT

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index a8e6b5904d..03b0d99cf6 100644
--- a/build.gradle
+++ b/build.gradle
@@ -70,7 +70,7 @@ dependencies {
   }

   // ie³ internal repository
-  implementation('com.github.ie3-institute:PowerSystemUtils:1.6') {
+  implementation('com.github.ie3-institute:PowerSystemUtils:2.0-SNAPSHOT') {
     exclude group: 'org.apache.logging.log4j'
     exclude group: 'org.slf4j'
     /* Exclude our own nested dependencies */

From b862524905352fdcb02e97a62e9445b9c6412b45 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Fri, 27 May 2022 12:21:10 +0200
Subject: [PATCH 31/58] Adapting tests to changed dependency

---
 .../service/weather/WeatherSourceSpec.scala    | 18 +++++++++---------
 .../weather/WeatherSourceWrapperSpec.scala     | 14 +++++++-------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceSpec.scala b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceSpec.scala
index 8988ddf682..987d42283b 100644
--- a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceSpec.scala
+++ b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceSpec.scala
@@ -27,7 +27,7 @@ import scala.jdk.OptionConverters._
 import scala.util.{Failure, Success}

 class WeatherSourceSpec extends UnitSpec {
-  private val coordinate0 = GeoUtils.xyToPoint(7.41, 51.47)
+  private val coordinate0 = GeoUtils.buildPoint(51.47, 7.41)

   "A weather source" should {
     "issue a ServiceException, if there are not enough coordinates available" in {
@@ -300,14 +300,14 @@ class WeatherSourceSpec extends UnitSpec {
 }

 case object WeatherSourceSpec {
-  private val coordinate67775 = GeoUtils.xyToPoint(7.438, 51.5)
-  private val coordinate531137 = GeoUtils.xyToPoint(7.375, 51.5)
-  private val coordinate551525 = GeoUtils.xyToPoint(7.438, 51.438)
-  private val coordinate278150 = GeoUtils.xyToPoint(7.375, 51.438)
-  private val coordinate477295 = GeoUtils.xyToPoint(12.812, 52.312)
-  private val coordinate537947 = GeoUtils.xyToPoint(12.812, 52.25)
-  private val coordinate144112 = GeoUtils.xyToPoint(12.875, 52.312)
-  private val coordinate165125 = GeoUtils.xyToPoint(12.875, 52.25)
+  private val coordinate67775 = GeoUtils.buildPoint(51.5, 7.438)
+  private val coordinate531137 = GeoUtils.buildPoint(51.5, 7.375)
+  private val coordinate551525 = GeoUtils.buildPoint(51.438, 7.438)
+  private val coordinate278150 = GeoUtils.buildPoint(51.438, 7.375)
+  private val coordinate477295 = GeoUtils.buildPoint(52.312, 12.812)
+  private val coordinate537947 = GeoUtils.buildPoint(52.25, 12.812)
+  private val coordinate144112 = GeoUtils.buildPoint(52.312, 12.875)
+  private val coordinate165125 = GeoUtils.buildPoint(52.25, 12.875)

   case object DummyWeatherSource extends WeatherSource {
     override protected val idCoordinateSource: IdCoordinateSource =
diff --git a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala
index 8b0502c6cf..96f83968bc 100644
--- a/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala
+++ b/src/test/scala/edu/ie3/simona/service/weather/WeatherSourceWrapperSpec.scala
@@ -290,13 +290,13 @@ class WeatherSourceWrapperSpec extends UnitSpec {

 object WeatherSourceWrapperSpec {
   // lat/lon are irrelevant, we will manually create weights later on
-  private val coordinate1a = GeoUtils.xyToPoint(6, 51)
-  private val coordinate1b = GeoUtils.xyToPoint(7, 51)
-  private val coordinate1c = GeoUtils.xyToPoint(8, 51)
-  private val coordinate1d = GeoUtils.xyToPoint(9, 51)
-  private val coordinate13 = GeoUtils.xyToPoint(10, 51)
-  private val coordinate13NoTemp = GeoUtils.xyToPoint(10, 52)
-  private val coordinateEmpty = GeoUtils.xyToPoint(10, 53)
+  private val coordinate1a = GeoUtils.buildPoint(51, 6)
+  private val coordinate1b = GeoUtils.buildPoint(51, 7)
+  private val coordinate1c = GeoUtils.buildPoint(51, 8)
+  private val coordinate1d = GeoUtils.buildPoint(51, 9)
+  private val coordinate13 = GeoUtils.buildPoint(51, 10)
+  private val coordinate13NoTemp = GeoUtils.buildPoint(52, 10)
+  private val coordinateEmpty = GeoUtils.buildPoint(53, 10)

   case object DummyPsdmWeatherSource extends PsdmWeatherSource {

From ca0cd5d9bd8f8ed634e920068c8d43c7c5c73265 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Sat, 28 May 2022 13:28:17 +0200
Subject: [PATCH 32/58] Adding to changelog

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 60ce8d035f..6bf348dbe4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,7 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 - Re-organizing test resources into their respective packages [#105](https://github.com/ie3-institute/simona/issues/105)
-- BREAKING: Using snapshot version of PSDM
+- BREAKING: Using snapshot version of PSDM and PSU
 - Simplified PrimaryServiceProxy due to changes in PSDM [#120](https://github.com/ie3-institute/simona/issues/120)
 - Improved handling of weights and their sum in determination of weather data [#173](https://github.com/ie3-institute/simona/issues/173)
 - Improving code readability in EvcsAgent by moving FreeLotsRequest to separate methods [#19](https://github.com/ie3-institute/simona/issues/19)

From d6f08a8e4d65da729538d0901bfda4d3d8951a39 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 30 May 2022 07:11:05 +0000
Subject: [PATCH 33/58] Bump mockito-core from 4.5.1 to 4.6.0 (#245)

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 03b0d99cf6..3ae41591bd 100644
--- a/build.gradle
+++ b/build.gradle
@@ -103,7 +103,7 @@ dependencies {
   /* testing */
   testImplementation 'org.spockframework:spock-core:2.1-groovy-3.0'
   testImplementation 'org.scalatestplus:mockito-3-4_2.13:3.2.10.0'
-  implementation 'org.mockito:mockito-core:4.5.1' // mocking framework
+  implementation 'org.mockito:mockito-core:4.6.0' // mocking framework
   testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.12"
   testRuntimeOnly 'com.vladsch.flexmark:flexmark-all:0.64.0' //scalatest html output
   testImplementation group: 'org.pegdown', name: 'pegdown', version: '1.6.0'

From b0af9fa8191bcc5e6af20eebc2119ccb121a1839 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Mon, 30 May 2022 11:36:53 +0200
Subject: [PATCH 34/58] Updating reviewers in dependabot config

---
 .github/dependabot.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 66ae07e78b..660970b64a 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -8,8 +8,6 @@ updates:
     open-pull-requests-limit: 10
     target-branch: dev
     reviewers:
-      - ckittl
-      - johanneshiry
      - t-ober
      - sensarmad
      - sebastian-peter

From 709051c5012ff1bda38afb300f23e54be7eaec2f Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Tue, 31 May 2022 11:26:57 +0200
Subject: [PATCH 35/58] Updating kafka to 7.1.1

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 56bae0f75a..3320672634 100644
--- a/build.gradle
+++ b/build.gradle
@@ -145,7 +145,7 @@ dependencies {

   /* Kafka */
   implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.2.0'
-  implementation 'io.confluent:kafka-streams-avro-serde:6.2.4'
+  implementation 'io.confluent:kafka-streams-avro-serde:7.1.1'
   implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.0.13"

From 46513c8b811397d64824d595612c6a4cb22cf067 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Tue, 31 May 2022 15:46:02 +0200
Subject: [PATCH 36/58] Adding ConfigFailFast checking for kafka broker and topics

---
 .../resources/config/config-template.conf     |  22 ++-
 .../ie3/simona/config/ConfigFailFast.scala    |  25 ++--
 .../edu/ie3/simona/config/SimonaConfig.scala  | 135 +++++++++---------
 .../ie3/simona/io/result/ResultSinkType.scala |   2 +-
 .../edu/ie3/simona/util/ConfigUtil.scala      |  56 +++++++-
 .../simona/config/ConfigFailFastSpec.scala    |  41 +++++-
 6 files changed, 192 insertions(+), 89 deletions(-)

diff --git a/src/main/resources/config/config-template.conf b/src/main/resources/config/config-template.conf
index 2b3af31bb1..5ec7c44844 100644
--- a/src/main/resources/config/config-template.conf
+++ b/src/main/resources/config/config-template.conf
@@ -70,6 +70,20 @@ PrimaryDataCsvParams {
   timePattern: string | "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'" # default pattern from PSDM:TimeBasedSimpleValueFactory
 }

+#@define abstract
+KafkaParams {
+  runId: string
+  bootstrapServers: string
+  schemaRegistryUrl: string
+  linger: int // in ms
+}
+
+#@define extends KafkaParams
+ResultKafkaParams {
+  base: KafkaParams
+  topicNodeRes = string
+}
+
 #@define
 BaseOutputConfig {
   notifier: string # Result event notifier
@@ -202,13 +216,7 @@ simona.output.sink.influxDb1x {
 }

 #@optional
-simona.output.sink.kafka {
-  topicNodeRes = string
-  runId = string
-  bootstrapServers = string
-  schemaRegistryUrl = string
-  linger = int // in ms
-}
+simona.output.sink.kafka = ResultKafkaParams

 simona.output.grid = GridOutputConfig
 simona.output.participant = {
   defaultConfig = BaseOutputConfig
diff --git a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
index 1e646d7ddc..4eb817b5f5 100644
--- a/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
+++ b/src/main/scala/edu/ie3/simona/config/ConfigFailFast.scala
@@ -8,15 +8,22 @@ package edu.ie3.simona.config

 import com.typesafe.config.{Config, ConfigException}
 import com.typesafe.scalalogging.LazyLogging
-import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink.{InfluxDb1x, Kafka}
-import edu.ie3.simona.config.SimonaConfig.{BaseOutputConfig, RefSystemConfig}
+import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink.InfluxDb1x
+import edu.ie3.simona.config.SimonaConfig.{
+  BaseOutputConfig,
+  RefSystemConfig,
+  ResultKafkaParams
+}
 import edu.ie3.simona.exceptions.InvalidConfigParameterException
 import edu.ie3.simona.io.result.ResultSinkType
 import edu.ie3.simona.model.participant.load.{LoadModelBehaviour, LoadReference}
 import edu.ie3.simona.service.primary.PrimaryServiceProxy
 import edu.ie3.simona.service.weather.WeatherSource
 import edu.ie3.simona.util.CollectionUtils
-import edu.ie3.simona.util.ConfigUtil.DatabaseConfigUtil.checkInfluxDb1xParams
+import edu.ie3.simona.util.ConfigUtil.DatabaseConfigUtil.{
+  checkInfluxDb1xParams,
+  checkKafkaParams
+}
 import edu.ie3.simona.util.ConfigUtil.{CsvConfigUtil, NotifierIdentifier}
 import edu.ie3.util.scala.ReflectionTools
 import edu.ie3.util.{StringUtils, TimeUtil}
@@ -179,16 +186,8 @@ case object ConfigFailFast extends LazyLogging {
           ResultSinkType.buildInfluxDb1xUrl(influxDb1x),
           influxDb1x.database
         )
-      case Some(Some(kafka: Kafka)) =>
-        try {
-          UUID.fromString(kafka.runId)
-        } catch {
-          case e: IllegalArgumentException =>
-            throw new InvalidConfigParameterException(
-              s"The UUID '${kafka.runId}' cannot be parsed as it is invalid.",
-              e
-            )
-        }
+      case Some(Some(kafka: ResultKafkaParams)) =>
+        checkKafkaParams(kafka, Seq(kafka.topicNodeRes))
       case _ => // do nothing
     }
diff --git a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
index 761dc49e75..c4ba623a6a 100644
--- a/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
+++ b/src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
@@ -290,6 +290,13 @@ object SimonaConfig {

   }

+  sealed abstract class KafkaParams(
+      val bootstrapServers: java.lang.String,
+      val linger: scala.Int,
+      val runId: java.lang.String,
+      val schemaRegistryUrl: java.lang.String
+  )
+
   final case class LoadRuntimeConfig(
       override val calculateMissingReactivePowerWithModel: scala.Boolean,
       override val scaling: scala.Double,
@@ -554,6 +561,63 @@ object SimonaConfig {

   }

+  final case class ResultKafkaParams(
+      override val bootstrapServers: java.lang.String,
+      override val linger: scala.Int,
+      override val runId: java.lang.String,
+      override val schemaRegistryUrl: java.lang.String,
+      topicNodeRes: java.lang.String
+  ) extends KafkaParams(bootstrapServers, linger, runId, schemaRegistryUrl)
+  object ResultKafkaParams {
+    def apply(
+        c: com.typesafe.config.Config,
+        parentPath: java.lang.String,
+        $tsCfgValidator: $TsCfgValidator
+    ): SimonaConfig.ResultKafkaParams = {
+      SimonaConfig.ResultKafkaParams(
+        topicNodeRes = $_reqStr(parentPath, c, "topicNodeRes", $tsCfgValidator),
+        bootstrapServers =
+          $_reqStr(parentPath, c, "bootstrapServers", $tsCfgValidator),
+        linger = $_reqInt(parentPath, c, "linger", $tsCfgValidator),
+        runId = $_reqStr(parentPath, c, "runId", $tsCfgValidator),
+        schemaRegistryUrl =
+          $_reqStr(parentPath, c, "schemaRegistryUrl", $tsCfgValidator)
+      )
+    }
+    private def $_reqInt(
+        parentPath: java.lang.String,
+        c: com.typesafe.config.Config,
+        path: java.lang.String,
+        $tsCfgValidator: $TsCfgValidator
+    ): scala.Int = {
+      if (c == null) 0
+      else
+        try c.getInt(path)
+        catch {
+          case e: com.typesafe.config.ConfigException =>
+            $tsCfgValidator.addBadPath(parentPath + path, e)
+            0
+        }
+    }
+
+    private def $_reqStr(
+        parentPath: java.lang.String,
+        c: com.typesafe.config.Config,
+        path: java.lang.String,
+        $tsCfgValidator: $TsCfgValidator
+    ): java.lang.String = {
+      if (c == null) null
+      else
+        try c.getString(path)
+        catch {
+          case e: com.typesafe.config.ConfigException =>
+            $tsCfgValidator.addBadPath(parentPath + path, e)
+            null
+        }
+    }
+
+  }
+
   final case class VoltLvlConfig(
       id: java.lang.String,
       vNom: java.lang.String
@@ -1512,7 +1576,7 @@ object SimonaConfig {
       final case class Sink(
           csv: scala.Option[SimonaConfig.Simona.Output.Sink.Csv],
          influxDb1x: scala.Option[SimonaConfig.Simona.Output.Sink.InfluxDb1x],
-          kafka: scala.Option[SimonaConfig.Simona.Output.Sink.Kafka]
+          kafka: scala.Option[SimonaConfig.ResultKafkaParams]
       )
       object Sink {
         final case class Csv(
@@ -1594,64 +1658,6 @@ object SimonaConfig {

         }

-        final case class Kafka(
-            bootstrapServers: java.lang.String,
-            linger: scala.Int,
-            runId: java.lang.String,
-            schemaRegistryUrl: java.lang.String,
-            topicNodeRes: java.lang.String
-        )
-        object Kafka {
-          def apply(
-              c: com.typesafe.config.Config,
-              parentPath: java.lang.String,
-              $tsCfgValidator: $TsCfgValidator
-          ): SimonaConfig.Simona.Output.Sink.Kafka = {
-            SimonaConfig.Simona.Output.Sink.Kafka(
-              bootstrapServers =
-                $_reqStr(parentPath, c, "bootstrapServers", $tsCfgValidator),
-              linger = $_reqInt(parentPath, c, "linger", $tsCfgValidator),
-              runId = $_reqStr(parentPath, c, "runId", $tsCfgValidator),
-              schemaRegistryUrl =
-                $_reqStr(parentPath, c, "schemaRegistryUrl", $tsCfgValidator),
-              topicNodeRes =
-                $_reqStr(parentPath, c, "topicNodeRes", $tsCfgValidator)
-            )
-          }
-          private def $_reqInt(
-              parentPath: java.lang.String,
-              c: com.typesafe.config.Config,
-              path: java.lang.String,
-              $tsCfgValidator: $TsCfgValidator
-          ): scala.Int = {
-            if (c == null) 0
-            else
-              try c.getInt(path)
-              catch {
-                case e: com.typesafe.config.ConfigException =>
-                  $tsCfgValidator.addBadPath(parentPath + path, e)
-                  0
-              }
-          }
-
-          private def $_reqStr(
-              parentPath: java.lang.String,
-              c: com.typesafe.config.Config,
-              path: java.lang.String,
-              $tsCfgValidator: $TsCfgValidator
-          ): java.lang.String = {
-            if (c == null) null
-            else
-              try c.getString(path)
-              catch {
-                case e: com.typesafe.config.ConfigException =>
-                  $tsCfgValidator.addBadPath(parentPath + path, e)
-                  null
-              }
-          }
-
-        }

         def apply(
             c: com.typesafe.config.Config,
             parentPath: java.lang.String,
             $tsCfgValidator: $TsCfgValidator
             kafka =
               if (c.hasPathOrNull("kafka"))
                 scala.Some(
-                  SimonaConfig.Simona.Output.Sink
-                    .Kafka(
-                      c.getConfig("kafka"),
-                      parentPath + "kafka.",
-                      $tsCfgValidator
-                    )
+                  SimonaConfig.ResultKafkaParams(
+                    c.getConfig("kafka"),
+                    parentPath + "kafka.",
+                    $tsCfgValidator
+                  )
                 )
               else None
         )
diff --git a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala
index 600fd6c8dd..d4c444fdd7 100644
--- a/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala
+++ b/src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala
@@ -51,7 +51,7 @@ object ResultSinkType {
         Csv(params.fileFormat, params.filePrefix, params.fileSuffix)
       case Some(params: SimonaConfig.Simona.Output.Sink.InfluxDb1x) =>
         InfluxDb1x(buildInfluxDb1xUrl(params), params.database, runName)
-      case Some(params: SimonaConfig.Simona.Output.Sink.Kafka) =>
+      case Some(params: SimonaConfig.ResultKafkaParams) =>
         Kafka(
           params.topicNodeRes,
           UUID.fromString(params.runId),
diff --git a/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala b/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala
index e2a98e0e70..ceef9abbdd 100644
--- a/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala
+++ b/src/main/scala/edu/ie3/simona/util/ConfigUtil.scala
@@ -13,7 +13,7 @@ import edu.ie3.datamodel.io.connectors.{
   SqlConnector
 }

-import java.util.UUID
+import java.util.{Properties, UUID}
 import edu.ie3.datamodel.models.result.connector.{
   LineResult,
   SwitchResult,
@@ -25,10 +25,14 @@ import edu.ie3.simona.config.SimonaConfig
 import edu.ie3.simona.config.SimonaConfig._
 import edu.ie3.simona.event.notifier.{Notifier, ParticipantNotifierConfig}
 import edu.ie3.simona.exceptions.InvalidConfigParameterException
+import org.apache.kafka.clients.admin.AdminClient
+import org.apache.kafka.common.KafkaException

 import java.io.File
+import java.util.concurrent.ExecutionException
 import scala.collection.mutable
-import scala.util.{Failure, Success, Try}
+import scala.jdk.CollectionConverters._
+import scala.util.{Failure, Success, Try, Using}

 object ConfigUtil {

@@ -457,6 +461,54 @@ object ConfigUtil {
         )
       }
     }
+
+    def checkKafkaParams(
+        kafkaParams: KafkaParams,
+        topics: Seq[String]
+    ): Unit = {
+      try {
+        UUID.fromString(kafkaParams.runId)
+      } catch {
+        case e: IllegalArgumentException =>
+          throw new InvalidConfigParameterException(
+            s"The UUID '${kafkaParams.runId}' cannot be parsed as it is invalid.",
+            e
+          )
+      }
+
+      val properties = new Properties()
+      properties.put("bootstrap.servers", kafkaParams.bootstrapServers)
+      properties.put("default.api.timeout.ms", 2000)
+      properties.put("request.timeout.ms", 1000)
+      try {
+        Using(AdminClient.create(properties)) { client =>
+          val existingTopics = client.listTopics.names().get().asScala
+          topics.filterNot(existingTopics.contains)
+        } match {
+          case Failure(ke: KafkaException) =>
+            throw new InvalidConfigParameterException(
+              s"Exception creating kafka client for broker ${kafkaParams.bootstrapServers}.",
+              ke
+            )
+          case Failure(ee: ExecutionException) =>
+            throw new InvalidConfigParameterException(
+              s"Connection with kafka broker ${kafkaParams.bootstrapServers} failed.",
+              ee
+            )
+          case Failure(other) =>
+            throw new InvalidConfigParameterException(
+              s"Checking kafka config failed with unexpected exception.",
+              other
+            )
+          case Success(missingTopics) if missingTopics.nonEmpty =>
+            throw new InvalidConfigParameterException(
+              s"Required kafka topics {${missingTopics.mkString}} do not exist."
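+            // (note: mkString is used here without a separator, so if several
+            // topics were missing, their names would be concatenated back-to-back
+            // in this error message)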
+            )
+          case Success(_) =>
+          // testing connection succeeded, do nothing
+        }
      }
    }
  }
diff --git a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
index f85cb2bf69..c26f0447d5 100644
--- a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
+++ b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
@@ -7,7 +7,7 @@ package edu.ie3.simona.config

 import com.typesafe.config.ConfigFactory
-import edu.ie3.simona.config.SimonaConfig.BaseCsvParams
+import edu.ie3.simona.config.SimonaConfig.{BaseCsvParams, ResultKafkaParams}
 import edu.ie3.simona.config.SimonaConfig.Simona.Input.Weather.Datasource.CoordinateSource
 import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink
 import edu.ie3.simona.config.SimonaConfig.Simona.Output.Sink.{Csv, InfluxDb1x}
@@ -716,6 +716,45 @@ class ConfigFailFastSpec extends UnitSpec with ConfigTestData {
           "Exception: java.lang.IllegalArgumentException: Unable to parse url: :0"
     }

+    "throw an exception if kafka is configured with a malformed UUID" in {
+      intercept[InvalidConfigParameterException] {
+        ConfigFailFast invokePrivate checkDataSinks(
+          Sink(
+            None,
+            None,
+            Some(
+              ResultKafkaParams(
+                "server:1234",
+                0,
+                "-not-a-uuid-",
+                "https://reg:123",
+                "topic"
+              )
+            )
+          )
+        )
+      }.getMessage shouldBe "The UUID '-not-a-uuid-' cannot be parsed as it is invalid."
+    }
+
+    "throw an exception if kafka is configured, but connection to broker fails" in {
+      intercept[InvalidConfigParameterException] {
+        ConfigFailFast invokePrivate checkDataSinks(
+          Sink(
+            None,
+            None,
+            Some(
+              ResultKafkaParams(
+                "doesnotexist:1234",
+                0,
+                "00000000-0000-0000-0000-000000000000",
+                "https://reg:123",
+                "topic"
+              )
+            )
+          )
+        )
+      }.getMessage shouldBe "Exception creating kafka client for broker doesnotexist:1234."
+    }
   }

   "Checking grid data sources" should {

From 4966f120d4536081196d61a13f9c7ba19856a1d6 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Tue, 31 May 2022 16:02:18 +0200
Subject: [PATCH 37/58] Added actual test for kafka broker connection

---
 .../simona/config/ConfigFailFastSpec.scala | 24 +++++++++++++++++--
 1 file changed, 22 insertions(+), 2 deletions(-)

diff --git a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
index c26f0447d5..27817b65e0 100644
--- a/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
+++ b/src/test/scala/edu/ie3/simona/config/ConfigFailFastSpec.scala
@@ -736,6 +736,26 @@ class ConfigFailFastSpec extends UnitSpec with ConfigTestData {
       }.getMessage shouldBe "The UUID '-not-a-uuid-' cannot be parsed as it is invalid."
     }

+    "throw an exception if kafka is configured, but creating kafka client fails" in {
+      intercept[InvalidConfigParameterException] {
+        ConfigFailFast invokePrivate checkDataSinks(
+          Sink(
+            None,
+            None,
+            Some(
+              ResultKafkaParams(
+                "not§a§server",
+                0,
+                "00000000-0000-0000-0000-000000000000",
+                "https://reg:123",
+                "topic"
+              )
+            )
+          )
+        )
+      }.getMessage shouldBe "Exception creating kafka client for broker not§a§server."
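+      // (note: the '§' characters presumably make "not§a§server" unparseable as a
+      // host:port address, so AdminClient.create itself fails with a KafkaException;
+      // the well-formed but unreachable broker in the next test only fails later,
+      // when the topic listing times out with an ExecutionException)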
+    }
+
     "throw an exception if kafka is configured, but connection to broker fails" in {
       intercept[InvalidConfigParameterException] {
         ConfigFailFast invokePrivate checkDataSinks(
           Sink(
             None,
             None,
             Some(
               ResultKafkaParams(
-                "doesnotexist:1234",
+                "localhost:12345",
                 0,
                 "00000000-0000-0000-0000-000000000000",
                 "https://reg:123",
                 "topic"
               )
             )
           )
         )
-      }.getMessage shouldBe "Exception creating kafka client for broker doesnotexist:1234."
+      }.getMessage shouldBe "Connection with kafka broker localhost:12345 failed."
     }
   }

From a2a97a6c73af6ee8dc181584b035a2ab38e7617c Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Tue, 31 May 2022 16:20:14 +0200
Subject: [PATCH 38/58] Adding test in ResultSinkTypeSpec

---
 .../simona/io/result/ResultSinkTypeSpec.scala | 39 ++++++++++++++++++-
 1 file changed, 37 insertions(+), 2 deletions(-)

diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala
index 2a90e5d73a..66b6986c47 100644
--- a/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala
+++ b/src/test/scala/edu/ie3/simona/io/result/ResultSinkTypeSpec.scala
@@ -7,9 +7,12 @@
 package edu.ie3.simona.io.result

 import edu.ie3.simona.config.SimonaConfig
-import edu.ie3.simona.io.result.ResultSinkType.{Csv, InfluxDb1x}
+import edu.ie3.simona.config.SimonaConfig.ResultKafkaParams
+import edu.ie3.simona.io.result.ResultSinkType.{Csv, InfluxDb1x, Kafka}
 import edu.ie3.simona.test.common.UnitSpec

+import java.util.UUID
+
 class ResultSinkTypeSpec extends UnitSpec {
   "A ResultSinkType" should {
     "be instantiated correctly when supplying a csv sink" in {
@@ -60,7 +63,39 @@ class ResultSinkTypeSpec extends UnitSpec {
       }
     }

-    // TODO kafka sink
+    "be instantiated correctly when supplying a kafka sink" in {
+      val conf = SimonaConfig.Simona.Output.Sink(
+        csv = None,
+        influxDb1x = None,
+        kafka = Some(
+          ResultKafkaParams(
+            "localhost:9092",
+            12,
+            "00000000-0000-0000-0000-000000000000",
+            "https://reg:123",
+            "topic"
+          )
+        )
+      )
+      val runName = "testRun"
+
+      inside(ResultSinkType(conf, runName)) {
+        case Kafka(
+              topicNodeRes,
+              runId,
+              bootstrapServers,
+              schemaRegistryUrl,
+              linger
+            ) =>
+          topicNodeRes shouldBe "topic"
+          runId shouldBe UUID.fromString("00000000-0000-0000-0000-000000000000")
+          bootstrapServers shouldBe "localhost:9092"
+          schemaRegistryUrl shouldBe "https://reg:123"
+          linger shouldBe 12
+        case _ =>
+          fail("Wrong ResultSinkType got instantiated.")
+      }
+    }

     "fail when more than one sink is supplied" in {
       val conf = SimonaConfig.Simona.Output.Sink(

From 6c85aa03718b2fdec940133511d5dc7f82dad9d8 Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Tue, 31 May 2022 22:53:31 +0200
Subject: [PATCH 39/58] Adding to changelog

---
 CHANGELOG.md | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e9c3f96807..8f7276f378 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,8 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Implement SQL source for primary data [#34](https://github.com/ie3-institute/simona/issues/34), [#101](https://github.com/ie3-institute/simona/issues/101)
 - Relevant scientific papers have been added to the documentation [#139](https://github.com/ie3-institute/simona/issues/139)
 - Add troubleshooting section to Users guide [#160](https://github.com/ie3-institute/simona/issues/160)
-
-
+- Added Kafka sink for results [#24](https://github.com/ie3-institute/simona/issues/24)

 ### Changed
 - Re-organizing test resources into their respective packages [#105](https://github.com/ie3-institute/simona/issues/105)

From 674ab57119b5c413fb6225c609b20c84886f7d70 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 1 Jun 2022 08:06:45 +0000
Subject: [PATCH 40/58] Bump testContainerVersion from 0.40.7 to 0.40.8 (#249)

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 3ae41591bd..10e1a7ad38 100644
--- a/build.gradle
+++ b/build.gradle
@@ -30,7 +30,7 @@ ext {
   tscfgVersion = '0.9.998'
   scapegoatVersion = '1.4.14'

-  testContainerVersion = '0.40.7'
+  testContainerVersion = '0.40.8'

   scriptsLocation = 'gradle' + File.separator + 'scripts' + File.separator // location of script plugins
 }

From d29262423d20e4959060287d95f3b274fc8c0af4 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 3 Jun 2022 09:16:52 +0000
Subject: [PATCH 41/58] Bump mockito-core from 4.6.0 to 4.6.1 (#250)

---
 build.gradle | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 10e1a7ad38..b0ae0f9771 100644
--- a/build.gradle
+++ b/build.gradle
@@ -103,7 +103,7 @@ dependencies {
   /* testing */
   testImplementation 'org.spockframework:spock-core:2.1-groovy-3.0'
   testImplementation 'org.scalatestplus:mockito-3-4_2.13:3.2.10.0'
-  implementation 'org.mockito:mockito-core:4.6.0' // mocking framework
+  implementation 'org.mockito:mockito-core:4.6.1' // mocking framework
   testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.12"
   testRuntimeOnly 'com.vladsch.flexmark:flexmark-all:0.64.0' //scalatest html output
   testImplementation group: 'org.pegdown', name: 'pegdown', version: '1.6.0'

From 9f5107e2acf67faf88434b57605e9114ec2ff64a Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Sat, 4 Jun 2022 01:33:29 +0200
Subject: [PATCH 42/58] Preliminary LoadProfile fixes

---
 .../load/profile/LoadProfileKey.scala            |  5 ++---
 .../load/profile/LoadProfileStore.scala          |  9 ++++++---
 .../load/profile/ProfileLoadModel.scala          | 10 ++++++----
 .../participant/load/FixedLoadModelTest.groovy   |  5 ++---
 .../participant/load/ProfileLoadModelTest.groovy |  4 ++--
 .../participant/load/RandomLoadModelTest.groovy  |  2 +-
 .../model/participant/load/LoadModelSpec.scala   | 16 +++++++---------
 .../participant/load/LoadProfileStoreSpec.scala  | 10 +++++-----
 .../test/common/input/LoadInputTestData.scala    |  6 +++---
 9 files changed, 34 insertions(+), 33 deletions(-)

diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala
index 8f59e10e9b..d008f64754 100644
--- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala
+++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala
@@ -7,9 +7,8 @@
 package edu.ie3.simona.model.participant.load.profile

 import java.time.ZonedDateTime
-
 import edu.ie3.datamodel.exceptions.ParsingException
-import edu.ie3.datamodel.models.StandardLoadProfile
+import edu.ie3.datamodel.models.profile.{LoadProfile, StandardLoadProfile}
 import edu.ie3.simona.model.participant.load
 import edu.ie3.simona.model.participant.load.{DayType, profile}

@@ -69,7 +68,7 @@ case object LoadProfileKey {
): LoadProfileKey = { try { new LoadProfileKey( - StandardLoadProfile.parse(loadProfile), + LoadProfile.parse(loadProfile).asInstanceOf[StandardLoadProfile], Season(season), DayType(dayType) ) diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala index 7757a0a302..8a2ffd6029 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala @@ -12,7 +12,10 @@ import java.util import breeze.numerics.round import com.typesafe.scalalogging.LazyLogging -import edu.ie3.datamodel.models.{BdewLoadProfile, StandardLoadProfile} +import edu.ie3.datamodel.models.profile.{ + BdewStandardLoadProfile, + StandardLoadProfile +} import edu.ie3.simona.model.participant.load.profile.LoadProfileStore.{ initializeMaxConsumptionPerProfile, initializeTypeDayValues @@ -65,7 +68,7 @@ class LoadProfileStore private (val reader: Reader) { case Some(typeDayValues) => val quarterHourEnergy = typeDayValues.getQuarterHourEnergy(time) val load = loadProfile match { - case BdewLoadProfile.H0 => + case BdewStandardLoadProfile.H0 => /* For the residential average profile, a dynamization has to be taken into account */ val t = time.getDayOfYear // leap years are ignored LoadProfileStore.dynamization(quarterHourEnergy, t) @@ -205,7 +208,7 @@ object LoadProfileStore extends LazyLogging { knownLoadProfiles .flatMap(loadProfile => { (loadProfile match { - case BdewLoadProfile.H0 => + case BdewStandardLoadProfile.H0 => // max load for h0 is expected to be exclusively found in winter, // thus we only search there. DayType.values diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala index 6cb73ff089..b9b207fef4 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala @@ -6,7 +6,7 @@ package edu.ie3.simona.model.participant.load.profile -import edu.ie3.datamodel.models.StandardLoadProfile +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.datamodel.models.input.system.LoadInput import edu.ie3.simona.model.participant.CalcRelevantData.LoadRelevantData import edu.ie3.simona.model.participant.control.QControl @@ -136,13 +136,15 @@ case object ProfileLoadModel { QControl.apply(input.getqCharacteristics()), sRatedPowerScaled, input.getCosPhiRated, - input.getStandardLoadProfile, + input.getLoadProfile.asInstanceOf[StandardLoadProfile], reference ) case LoadReference.EnergyConsumption(energyConsumption) => val loadProfileMax = - LoadProfileStore().maxPower(input.getStandardLoadProfile) + LoadProfileStore().maxPower( + input.getLoadProfile.asInstanceOf[StandardLoadProfile] + ) val sRatedEnergy = LoadModel.scaleSRatedEnergy( input, energyConsumption, @@ -157,7 +159,7 @@ case object ProfileLoadModel { QControl.apply(input.getqCharacteristics()), sRatedEnergy, input.getCosPhiRated, - input.getStandardLoadProfile, + input.getLoadProfile.asInstanceOf[StandardLoadProfile], reference ) } diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy index 925470e52a..26492f333f 100644 --- 
a/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy @@ -6,7 +6,7 @@ package edu.ie3.simona.model.participant.load -import edu.ie3.datamodel.models.BdewLoadProfile +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -14,7 +14,6 @@ import edu.ie3.datamodel.models.input.system.LoadInput import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils import edu.ie3.simona.model.SystemComponent -import edu.ie3.simona.model.participant.CalcRelevantData import edu.ie3.simona.model.participant.control.QControl import edu.ie3.util.TimeUtil import spock.lang.Specification @@ -45,7 +44,7 @@ class FixedLoadModelTest extends Specification { -1 ), new CosPhiFixed("cosPhiFixed:{(0.0,0.95)}"), - BdewLoadProfile.H0, + BdewStandardLoadProfile.H0, false, Quantities.getQuantity(3000d, KILOWATTHOUR), Quantities.getQuantity(282.74d, VOLTAMPERE), diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy index a645ef9f6d..0f4de025b3 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy @@ -24,7 +24,7 @@ import javax.measure.quantity.Energy import java.time.temporal.ChronoUnit import java.util.stream.Collectors -import static edu.ie3.datamodel.models.BdewLoadProfile.* +import static edu.ie3.datamodel.models.profile.BdewStandardLoadProfile.* import static edu.ie3.simona.model.participant.load.LoadReference.ActivePower import static edu.ie3.simona.model.participant.load.LoadReference.EnergyConsumption import static edu.ie3.util.quantities.PowerSystemUnits.* @@ -71,7 +71,7 @@ class ProfileLoadModelTest extends Specification { def "A profile load model should be instantiated from valid input correctly"() { when: def actual = ProfileLoadModel.apply( - loadInput.copy().standardLoadProfile(profile).build(), + loadInput.copy().loadprofile(profile).build(), foreSeenOperationInterval, 1.0, reference) diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy index 5089d7da95..171bd8ad22 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy @@ -24,7 +24,7 @@ import javax.measure.quantity.Energy import java.time.temporal.ChronoUnit import java.util.stream.Collectors -import static edu.ie3.datamodel.models.BdewLoadProfile.H0 +import static edu.ie3.datamodel.models.profile.BdewStandardLoadProfile.H0 import static edu.ie3.simona.model.participant.load.LoadReference.ActivePower import static edu.ie3.simona.model.participant.load.LoadReference.EnergyConsumption import static edu.ie3.util.quantities.PowerSystemUnits.* diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala index 467214b051..b2ea0bec8d 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala +++ 
b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala @@ -6,25 +6,22 @@ package edu.ie3.simona.model.participant.load +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.control.QControl -import edu.ie3.simona.model.participant.load.LoadReference.{ - ActivePower, - EnergyConsumption -} import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel import edu.ie3.simona.model.participant.load.random.RandomLoadModel import edu.ie3.simona.test.common.UnitSpec import edu.ie3.simona.test.common.input.LoadInputTestData -import edu.ie3.simona.util.ConfigUtil -import edu.ie3.util.quantities.PowerSystemUnits.{KILOWATTHOUR, MEGAVOLTAMPERE} +import edu.ie3.util.quantities.PowerSystemUnits.KILOWATTHOUR import edu.ie3.util.quantities.{PowerSystemUnits, QuantityUtil} -import javax.measure.Quantity -import javax.measure.quantity.Power import org.scalatest.PrivateMethodTester import org.scalatest.prop.TableDrivenPropertyChecks import tech.units.indriya.quantity.Quantities import tech.units.indriya.unit.Units.WATT +import javax.measure.Quantity +import javax.measure.quantity.Power + class LoadModelSpec extends UnitSpec with LoadInputTestData @@ -85,7 +82,8 @@ class LoadModelSpec quantityTolerance ) shouldBe true cosPhiRated shouldBe loadInput.getCosPhiRated - loadProfile shouldBe loadInput.getStandardLoadProfile + loadProfile shouldBe loadInput.getLoadProfile + .asInstanceOf[StandardLoadProfile] reference shouldBe foreSeenReference } } diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala index a3c8d34676..2702c1b0b4 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala +++ b/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala @@ -12,8 +12,8 @@ import java.time.temporal.ChronoUnit import breeze.numerics.abs import com.typesafe.scalalogging.LazyLogging -import edu.ie3.datamodel.models.BdewLoadProfile._ -import edu.ie3.datamodel.models.{BdewLoadProfile, StandardLoadProfile} +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile._ +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.load.profile.{ LoadProfileKey, LoadProfileStore, @@ -113,10 +113,10 @@ class LoadProfileStoreSpec /* List the expected annual energy consumption */ val expectedEnergyConsumption : Map[StandardLoadProfile, ComparableQuantity[Energy]] = Map( - BdewLoadProfile.H0 -> Quantities.getQuantity(1000d, KILOWATTHOUR), - BdewLoadProfile.L0 -> Quantities.getQuantity(1002d, KILOWATTHOUR), + H0 -> Quantities.getQuantity(1000d, KILOWATTHOUR), + L0 -> Quantities.getQuantity(1002d, KILOWATTHOUR), /* TODO: Check, if this is correct */ - BdewLoadProfile.G0 -> Quantities.getQuantity(1022d, KILOWATTHOUR) + G0 -> Quantities.getQuantity(1022d, KILOWATTHOUR) ) /* Collect all available time steps in 2020 */ diff --git a/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala index 139e1a577f..b99c09fc8f 100644 --- a/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala @@ -7,11 +7,11 @@ package edu.ie3.simona.test.common.input import java.util.UUID - import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.LoadInput 
import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed -import edu.ie3.datamodel.models.{BdewLoadProfile, OperationTime} +import edu.ie3.datamodel.models.OperationTime +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.util.quantities.PowerSystemUnits.{KILOWATTHOUR, VOLTAMPERE} import tech.units.indriya.quantity.Quantities @@ -29,7 +29,7 @@ trait LoadInputTestData extends NodeInputTestData { OperationTime.notLimited(), nodeInputNoSlackNs04KvA, new CosPhiFixed("cosPhiFixed:{(0.0,0.95)}"), - BdewLoadProfile.H0, + BdewStandardLoadProfile.H0, false, Quantities.getQuantity(3000d, KILOWATTHOUR), Quantities.getQuantity(282.74d, VOLTAMPERE), From 4a1688d013b52ffe1dadd7a657159318736e64a0 Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Sat, 4 Jun 2022 01:58:26 +0200 Subject: [PATCH 43/58] Improving ResultEvenListener parameters Improving ResultEventListener Kafka test --- .../event/listener/ResultEventListener.scala | 17 +- .../sim/setup/SimonaStandaloneSetup.scala | 1 - .../ie3/simona/util/ResultFileHierarchy.scala | 3 + .../listener/ResultEventListenerSpec.scala | 36 +-- .../io/result/ResultEntityKafkaSpec.scala | 208 +++++++++++------- ...afkaFlatSpec.scala => KafkaSpecLike.scala} | 18 +- 6 files changed, 158 insertions(+), 125 deletions(-) rename src/test/scala/edu/ie3/simona/test/{KafkaFlatSpec.scala => KafkaSpecLike.scala} (75%) diff --git a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala index 5054bca294..43b59cf1eb 100644 --- a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala +++ b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala @@ -76,13 +76,11 @@ object ResultEventListener extends Transformer3wResultSupport { ) extends ResultEventListenerData def props( - eventClassesToConsider: Set[Class[_ <: ResultEntity]], resultFileHierarchy: ResultFileHierarchy, supervisor: ActorRef ): Props = Props( new ResultEventListener( - eventClassesToConsider, resultFileHierarchy, supervisor ) @@ -92,20 +90,19 @@ object ResultEventListener extends Transformer3wResultSupport { * with the model names as strings. It generates one sink for each model * class. 
* - * @param eventClassesToConsider - * Incoming event classes that should be considered + * @param resultFileHierarchy + * The result file hierarchy * @return * mapping of the model class to the sink for this model class */ private def initializeSinks( - eventClassesToConsider: Set[Class[_ <: ResultEntity]], resultFileHierarchy: ResultFileHierarchy )(implicit materializer: Materializer ): Iterable[Future[(Class[_], ResultEntitySink)]] = { resultFileHierarchy.resultSinkType match { case _: ResultSinkType.Csv => - eventClassesToConsider + resultFileHierarchy.resultEntitiesToConsider .map(resultClass => { resultFileHierarchy.rawOutputDataFilePaths .get(resultClass) @@ -136,7 +133,7 @@ object ResultEventListener extends Transformer3wResultSupport { }) case ResultSinkType.InfluxDb1x(url, database, scenario) => // creates one connection per result entity that should be processed - eventClassesToConsider + resultFileHierarchy.resultEntitiesToConsider .map(resultClass => ResultEntityInfluxDbSink(url, database, scenario).map( (resultClass, _) @@ -172,7 +169,6 @@ object ResultEventListener extends Transformer3wResultSupport { } class ResultEventListener( - eventClassesToConsider: Set[Class[_ <: ResultEntity]], resultFileHierarchy: ResultFileHierarchy, supervisor: ActorRef ) extends SimonaListener @@ -186,7 +182,9 @@ class ResultEventListener( log.debug("Starting initialization!") log.debug( s"Events that will be processed: {}", - eventClassesToConsider.map(_.getSimpleName).mkString(",") + resultFileHierarchy.resultEntitiesToConsider + .map(_.getSimpleName) + .mkString(",") ) self ! Init } @@ -321,7 +319,6 @@ class ResultEventListener( Future .sequence( ResultEventListener.initializeSinks( - eventClassesToConsider, resultFileHierarchy ) ) diff --git a/src/main/scala/edu/ie3/simona/sim/setup/SimonaStandaloneSetup.scala b/src/main/scala/edu/ie3/simona/sim/setup/SimonaStandaloneSetup.scala index 688513d307..a7222211fd 100644 --- a/src/main/scala/edu/ie3/simona/sim/setup/SimonaStandaloneSetup.scala +++ b/src/main/scala/edu/ie3/simona/sim/setup/SimonaStandaloneSetup.scala @@ -248,7 +248,6 @@ class SimonaStandaloneSetup( .toVector :+ context.simonaActorOf( ResultEventListener.props( - SetupHelper.allResultEntitiesToWrite(simonaConfig.simona.output), resultFileHierarchy, simonaSim ) diff --git a/src/main/scala/edu/ie3/simona/util/ResultFileHierarchy.scala b/src/main/scala/edu/ie3/simona/util/ResultFileHierarchy.scala index 8e717c229a..61276196db 100644 --- a/src/main/scala/edu/ie3/simona/util/ResultFileHierarchy.scala +++ b/src/main/scala/edu/ie3/simona/util/ResultFileHierarchy.scala @@ -62,6 +62,9 @@ final case class ResultFileHierarchy( val resultSinkType: ResultSinkType = resultEntityPathConfig.resultSinkType + val resultEntitiesToConsider: Set[Class[_ <: ResultEntity]] = + resultEntityPathConfig.resultEntitiesToConsider + val rawOutputDataFilePaths: Map[Class[_ <: ResultEntity], String] = { resultSinkType match { case csv: Csv => diff --git a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala index 8a95b4fa64..49d1743d10 100644 --- a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala +++ b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala @@ -71,17 +71,20 @@ class ResultEventListenerSpec ) // the OutputFileHierarchy - val resultFileHierarchy: (Int, String) => ResultFileHierarchy = - (runId: Int, fileFormat: String) => - ResultFileHierarchy( - outputDir = 
testTmpDir + File.separator + runId, - simulationName, - ResultEntityPathConfig( - resultEntitiesToBeWritten, - ResultSinkType.Csv(fileFormat = fileFormat) - ), - createDirs = true - ) + private def resultFileHierarchy( + runId: Int, + fileFormat: String, + classes: Set[Class[_ <: ResultEntity]] = resultEntitiesToBeWritten + ): ResultFileHierarchy = + ResultFileHierarchy( + outputDir = testTmpDir + File.separator + runId, + simulationName, + ResultEntityPathConfig( + classes, + ResultSinkType.Csv(fileFormat = fileFormat) + ), + createDirs = true + ) def createDir( resultFileHierarchy: ResultFileHierarchy, @@ -139,11 +142,11 @@ class ResultEventListenerSpec } "check if actor dies when it should die" in { - val fileHierarchy = resultFileHierarchy(2, ".ttt") + val fileHierarchy = + resultFileHierarchy(2, ".ttt", Set(classOf[Transformer3WResult])) val testProbe = TestProbe() val listener = testProbe.childActorOf( ResultEventListener.props( - Set(classOf[Transformer3WResult]), fileHierarchy, testProbe.ref ) @@ -162,7 +165,6 @@ class ResultEventListenerSpec val listenerRef = system.actorOf( ResultEventListener .props( - resultEntitiesToBeWritten, specificOutputFileHierarchy, testActor ) @@ -208,7 +210,6 @@ class ResultEventListenerSpec val listenerRef = system.actorOf( ResultEventListener .props( - resultEntitiesToBeWritten, specificOutputFileHierarchy, testActor ) @@ -292,10 +293,10 @@ class ResultEventListenerSpec PrivateMethod[Map[Transformer3wKey, AggregatedTransformer3wResult]]( Symbol("registerPartialTransformer3wResult") ) - val fileHierarchy = resultFileHierarchy(5, ".csv") + val fileHierarchy = + resultFileHierarchy(5, ".csv", Set(classOf[Transformer3WResult])) val listener = TestFSMRef( new ResultEventListener( - Set(classOf[Transformer3WResult]), fileHierarchy, testActor ) @@ -526,7 +527,6 @@ class ResultEventListenerSpec val listenerRef = system.actorOf( ResultEventListener .props( - resultEntitiesToBeWritten, specificOutputFileHierarchy, testActor ) diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala index d602890016..69052af3a1 100644 --- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala +++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala @@ -6,12 +6,19 @@ package edu.ie3.simona.io.result +import akka.actor.ActorSystem +import akka.testkit.TestActorRef import com.sksamuel.avro4s.RecordFormat +import com.typesafe.config.ConfigFactory import edu.ie3.datamodel.models.result.NodeResult +import edu.ie3.simona.event.ResultEvent.PowerFlowResultEvent +import edu.ie3.simona.event.listener.ResultEventListener import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult import edu.ie3.simona.io.result.plain.PlainWriter -import edu.ie3.simona.test.KafkaFlatSpec -import edu.ie3.simona.test.KafkaFlatSpec.Topic +import edu.ie3.simona.test.KafkaSpecLike +import edu.ie3.simona.test.KafkaSpecLike.Topic +import edu.ie3.simona.test.common.{TestKitWithShutdown, UnitSpec} +import edu.ie3.simona.util.ResultFileHierarchy import edu.ie3.util.quantities.PowerSystemUnits import edu.ie3.util.scala.io.ScalaReflectionSerde import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG @@ -19,7 +26,8 @@ import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.serialization.{Deserializer, Serdes} import org.apache.kafka.common.utils.Bytes -import 
org.scalatest.OptionValues +import org.scalatest.GivenWhenThen +import org.scalatest.concurrent.Eventually import tech.units.indriya.quantity.Quantities import java.time.ZonedDateTime @@ -32,7 +40,22 @@ import scala.language.postfixOps /** Adapted from * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html */ -class ResultEntityKafkaSpec extends KafkaFlatSpec with OptionValues { +class ResultEntityKafkaSpec + extends TestKitWithShutdown( + ActorSystem( + "ResultEntityKafkaSpec", + ConfigFactory + .parseString( + """akka.loggers = ["edu.ie3.simona.test.common.SilentTestEventListener"] + |akka.loglevel="info" + """.stripMargin + ) + ) + ) + with UnitSpec + with KafkaSpecLike + with GivenWhenThen + with Eventually { var testConsumer: KafkaConsumer[Bytes, PlainNodeResult] = _ @@ -65,92 +88,111 @@ class ResultEntityKafkaSpec extends KafkaFlatSpec with OptionValues { ) } - "produce" should "write a series of new NodeResults to kafka" in { - - Given("a producer config") - - val mockSchemaRegistryUrl = "mock://unused:8081" + "ResultEventListener with Kafka configuration" should { + "write a series of new NodeResults to Kafka" in { + + Given("a ResultEventListener with Kafka config") + + val mockSchemaRegistryUrl = "mock://unused:8081" + + val runId = UUID.randomUUID() + + // build the listener + val listenerRef = TestActorRef( + ResultEventListener.props( + ResultFileHierarchy( + "out", + "simName", + ResultFileHierarchy.ResultEntityPathConfig( + Set(classOf[NodeResult]), + ResultSinkType.Kafka( + topic, + runId, + kafka.bootstrapServers, + mockSchemaRegistryUrl, + 0 + ) + ) + ), + testActor + ) + ) - val runId = UUID.randomUUID() + And("a collection of NodeResults") + val nodeRes1 = new NodeResult( + ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(1d, PowerSystemUnits.PU), + Quantities.getQuantity(0d, PowerSystemUnits.DEGREE_GEOM) + ) + val nodeRes2 = new NodeResult( + ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(0.8d, PowerSystemUnits.PU), + Quantities.getQuantity(15d, PowerSystemUnits.DEGREE_GEOM) + ) + val nodeRes3 = new NodeResult( + ZonedDateTime.parse("2021-01-10T00:00:00+01:00[Europe/Berlin]"), + UUID.randomUUID(), + Quantities.getQuantity(0.75d, PowerSystemUnits.PU), + Quantities.getQuantity(90d, PowerSystemUnits.DEGREE_GEOM) + ) - val resultEntitySink = ResultEntityKafkaSink[NodeResult]( - topic, - runId, - kafka.bootstrapServers, - mockSchemaRegistryUrl, - 0 - ) + When("receiving the NodeResults") + listenerRef ! 
PowerFlowResultEvent( + Iterable(nodeRes1, nodeRes2, nodeRes3), + Iterable.empty, + Iterable.empty, + Iterable.empty, + Iterable.empty + ) - And("a collection of NodeResults") - val nodeRes1 = new NodeResult( - ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), - UUID.randomUUID(), - Quantities.getQuantity(1d, PowerSystemUnits.PU), - Quantities.getQuantity(0d, PowerSystemUnits.DEGREE_GEOM) - ) - val nodeRes2 = new NodeResult( - ZonedDateTime.parse("2021-01-01T00:00:00+01:00[Europe/Berlin]"), - UUID.randomUUID(), - Quantities.getQuantity(0.8d, PowerSystemUnits.PU), - Quantities.getQuantity(15d, PowerSystemUnits.DEGREE_GEOM) - ) - val nodeRes3 = new NodeResult( - ZonedDateTime.parse("2021-01-10T00:00:00+01:00[Europe/Berlin]"), - UUID.randomUUID(), - Quantities.getQuantity(0.75d, PowerSystemUnits.PU), - Quantities.getQuantity(90d, PowerSystemUnits.DEGREE_GEOM) - ) + val testTopic = testTopics.headOption.value + val topicPartitions: Seq[TopicPartition] = + (0 until testTopic.partitions).map( + new TopicPartition(testTopic.name, _) + ) - When("producing the NodeResults") - resultEntitySink.handleResultEntity(nodeRes1) - resultEntitySink.handleResultEntity(nodeRes2) - resultEntitySink.handleResultEntity(nodeRes3) - - val testTopic = testTopics.headOption.value - val topicPartitions: Seq[TopicPartition] = - (0 until testTopic.partitions).map(new TopicPartition(testTopic.name, _)) - - testConsumer.assign(topicPartitions.asJava) - - Then("records can be fetched from Kafka") - eventually(timeout(5 second), interval(1 second)) { - testConsumer.seekToBeginning(topicPartitions.asJava) - val records: List[PlainNodeResult] = - testConsumer.poll((1 second) toJava).asScala.map(_.value()).toList - - records should have length 3 - records should contain( - PlainNodeResult( - runId, - PlainWriter.createSimpleTimeStamp(nodeRes1.getTime), - nodeRes1.getUuid, - nodeRes1.getInputModel, - nodeRes1.getvMag().getValue.doubleValue(), - nodeRes1.getvAng().getValue.doubleValue() + testConsumer.assign(topicPartitions.asJava) + + Then("records can be fetched from Kafka") + eventually(timeout(5 second), interval(1 second)) { + testConsumer.seekToBeginning(topicPartitions.asJava) + val records: List[PlainNodeResult] = + testConsumer.poll((1 second) toJava).asScala.map(_.value()).toList + + records should have length 3 + records should contain( + PlainNodeResult( + runId, + PlainWriter.createSimpleTimeStamp(nodeRes1.getTime), + nodeRes1.getUuid, + nodeRes1.getInputModel, + nodeRes1.getvMag().getValue.doubleValue(), + nodeRes1.getvAng().getValue.doubleValue() + ) ) - ) - records should contain( - PlainNodeResult( - runId, - PlainWriter.createSimpleTimeStamp(nodeRes2.getTime), - nodeRes2.getUuid, - nodeRes2.getInputModel, - nodeRes2.getvMag().getValue.doubleValue(), - nodeRes2.getvAng().getValue.doubleValue() + records should contain( + PlainNodeResult( + runId, + PlainWriter.createSimpleTimeStamp(nodeRes2.getTime), + nodeRes2.getUuid, + nodeRes2.getInputModel, + nodeRes2.getvMag().getValue.doubleValue(), + nodeRes2.getvAng().getValue.doubleValue() + ) ) - ) - records should contain( - PlainNodeResult( - runId, - PlainWriter.createSimpleTimeStamp(nodeRes3.getTime), - nodeRes3.getUuid, - nodeRes3.getInputModel, - nodeRes3.getvMag().getValue.doubleValue(), - nodeRes3.getvAng().getValue.doubleValue() + records should contain( + PlainNodeResult( + runId, + PlainWriter.createSimpleTimeStamp(nodeRes3.getTime), + nodeRes3.getUuid, + nodeRes3.getInputModel, + nodeRes3.getvMag().getValue.doubleValue(), + 
nodeRes3.getvAng().getValue.doubleValue() + ) ) - ) + } } - - resultEntitySink.close() } } diff --git a/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala b/src/test/scala/edu/ie3/simona/test/KafkaSpecLike.scala similarity index 75% rename from src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala rename to src/test/scala/edu/ie3/simona/test/KafkaSpecLike.scala index 014aee4c30..c801aafcd6 100644 --- a/src/test/scala/edu/ie3/simona/test/KafkaFlatSpec.scala +++ b/src/test/scala/edu/ie3/simona/test/KafkaSpecLike.scala @@ -7,13 +7,10 @@ package edu.ie3.simona.test import com.dimafeng.testcontainers.KafkaContainer -import edu.ie3.simona.test.KafkaFlatSpec.Topic +import edu.ie3.simona.test.KafkaSpecLike.Topic import org.apache.kafka.clients.admin.{Admin, NewTopic} import org.junit.Rule -import org.scalatest.concurrent.Eventually -import org.scalatest.flatspec.AnyFlatSpec -import org.scalatest.matchers.should.Matchers -import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Inspectors} +import org.scalatest.{BeforeAndAfterAll, TestSuite} import org.testcontainers.utility.DockerImageName import scala.jdk.CollectionConverters._ @@ -21,13 +18,8 @@ import scala.jdk.CollectionConverters._ /** Adapted from * https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html */ -trait KafkaFlatSpec - extends AnyFlatSpec - with Matchers - with Inspectors - with BeforeAndAfterAll - with GivenWhenThen - with Eventually { +trait KafkaSpecLike extends BeforeAndAfterAll { + this: TestSuite => protected val testTopics: Vector[Topic] @@ -60,7 +52,7 @@ trait KafkaFlatSpec } } -object KafkaFlatSpec { +object KafkaSpecLike { final case class Topic( name: String, partitions: Int, From 9bf9faa7570f024163f4edcc389d0643467d4a4f Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Sat, 4 Jun 2022 01:33:29 +0200 Subject: [PATCH 44/58] Preliminary LoadProfile fixes --- .../load/profile/LoadProfileKey.scala | 5 ++--- .../load/profile/LoadProfileStore.scala | 9 ++++++--- .../load/profile/ProfileLoadModel.scala | 10 ++++++---- .../participant/load/FixedLoadModelTest.groovy | 5 ++--- .../participant/load/ProfileLoadModelTest.groovy | 4 ++-- .../participant/load/RandomLoadModelTest.groovy | 2 +- .../model/participant/load/LoadModelSpec.scala | 16 +++++++--------- .../participant/load/LoadProfileStoreSpec.scala | 10 +++++----- .../test/common/input/LoadInputTestData.scala | 6 +++--- 9 files changed, 34 insertions(+), 33 deletions(-) diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala index 8f59e10e9b..d008f64754 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala @@ -7,9 +7,8 @@ package edu.ie3.simona.model.participant.load.profile import java.time.ZonedDateTime - import edu.ie3.datamodel.exceptions.ParsingException -import edu.ie3.datamodel.models.StandardLoadProfile +import edu.ie3.datamodel.models.profile.{LoadProfile, StandardLoadProfile} import edu.ie3.simona.model.participant.load import edu.ie3.simona.model.participant.load.{DayType, profile} @@ -69,7 +68,7 @@ case object LoadProfileKey { ): LoadProfileKey = { try { new LoadProfileKey( - StandardLoadProfile.parse(loadProfile), + LoadProfile.parse(loadProfile).asInstanceOf[StandardLoadProfile], Season(season), DayType(dayType) ) diff --git 
a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala index 7757a0a302..8a2ffd6029 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileStore.scala @@ -12,7 +12,10 @@ import java.util import breeze.numerics.round import com.typesafe.scalalogging.LazyLogging -import edu.ie3.datamodel.models.{BdewLoadProfile, StandardLoadProfile} +import edu.ie3.datamodel.models.profile.{ + BdewStandardLoadProfile, + StandardLoadProfile +} import edu.ie3.simona.model.participant.load.profile.LoadProfileStore.{ initializeMaxConsumptionPerProfile, initializeTypeDayValues @@ -65,7 +68,7 @@ class LoadProfileStore private (val reader: Reader) { case Some(typeDayValues) => val quarterHourEnergy = typeDayValues.getQuarterHourEnergy(time) val load = loadProfile match { - case BdewLoadProfile.H0 => + case BdewStandardLoadProfile.H0 => /* For the residential average profile, a dynamization has to be taken into account */ val t = time.getDayOfYear // leap years are ignored LoadProfileStore.dynamization(quarterHourEnergy, t) @@ -205,7 +208,7 @@ object LoadProfileStore extends LazyLogging { knownLoadProfiles .flatMap(loadProfile => { (loadProfile match { - case BdewLoadProfile.H0 => + case BdewStandardLoadProfile.H0 => // max load for h0 is expected to be exclusively found in winter, // thus we only search there. DayType.values diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala index 6cb73ff089..b9b207fef4 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/ProfileLoadModel.scala @@ -6,7 +6,7 @@ package edu.ie3.simona.model.participant.load.profile -import edu.ie3.datamodel.models.StandardLoadProfile +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.datamodel.models.input.system.LoadInput import edu.ie3.simona.model.participant.CalcRelevantData.LoadRelevantData import edu.ie3.simona.model.participant.control.QControl @@ -136,13 +136,15 @@ case object ProfileLoadModel { QControl.apply(input.getqCharacteristics()), sRatedPowerScaled, input.getCosPhiRated, - input.getStandardLoadProfile, + input.getLoadProfile.asInstanceOf[StandardLoadProfile], reference ) case LoadReference.EnergyConsumption(energyConsumption) => val loadProfileMax = - LoadProfileStore().maxPower(input.getStandardLoadProfile) + LoadProfileStore().maxPower( + input.getLoadProfile.asInstanceOf[StandardLoadProfile] + ) val sRatedEnergy = LoadModel.scaleSRatedEnergy( input, energyConsumption, @@ -157,7 +159,7 @@ case object ProfileLoadModel { QControl.apply(input.getqCharacteristics()), sRatedEnergy, input.getCosPhiRated, - input.getStandardLoadProfile, + input.getLoadProfile.asInstanceOf[StandardLoadProfile], reference ) } diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy index 925470e52a..26492f333f 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/FixedLoadModelTest.groovy @@ -6,7 +6,7 @@ package edu.ie3.simona.model.participant.load 
-import edu.ie3.datamodel.models.BdewLoadProfile +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -14,7 +14,6 @@ import edu.ie3.datamodel.models.input.system.LoadInput import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils import edu.ie3.simona.model.SystemComponent -import edu.ie3.simona.model.participant.CalcRelevantData import edu.ie3.simona.model.participant.control.QControl import edu.ie3.util.TimeUtil import spock.lang.Specification @@ -45,7 +44,7 @@ class FixedLoadModelTest extends Specification { -1 ), new CosPhiFixed("cosPhiFixed:{(0.0,0.95)}"), - BdewLoadProfile.H0, + BdewStandardLoadProfile.H0, false, Quantities.getQuantity(3000d, KILOWATTHOUR), Quantities.getQuantity(282.74d, VOLTAMPERE), diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy index a645ef9f6d..0f4de025b3 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/ProfileLoadModelTest.groovy @@ -24,7 +24,7 @@ import javax.measure.quantity.Energy import java.time.temporal.ChronoUnit import java.util.stream.Collectors -import static edu.ie3.datamodel.models.BdewLoadProfile.* +import static edu.ie3.datamodel.models.profile.BdewStandardLoadProfile.* import static edu.ie3.simona.model.participant.load.LoadReference.ActivePower import static edu.ie3.simona.model.participant.load.LoadReference.EnergyConsumption import static edu.ie3.util.quantities.PowerSystemUnits.* @@ -71,7 +71,7 @@ class ProfileLoadModelTest extends Specification { def "A profile load model should be instantiated from valid input correctly"() { when: def actual = ProfileLoadModel.apply( - loadInput.copy().standardLoadProfile(profile).build(), + loadInput.copy().loadprofile(profile).build(), foreSeenOperationInterval, 1.0, reference) diff --git a/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy b/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy index 5089d7da95..171bd8ad22 100644 --- a/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy +++ b/src/test/groovy/edu/ie3/simona/model/participant/load/RandomLoadModelTest.groovy @@ -24,7 +24,7 @@ import javax.measure.quantity.Energy import java.time.temporal.ChronoUnit import java.util.stream.Collectors -import static edu.ie3.datamodel.models.BdewLoadProfile.H0 +import static edu.ie3.datamodel.models.profile.BdewStandardLoadProfile.H0 import static edu.ie3.simona.model.participant.load.LoadReference.ActivePower import static edu.ie3.simona.model.participant.load.LoadReference.EnergyConsumption import static edu.ie3.util.quantities.PowerSystemUnits.* diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala index 467214b051..b2ea0bec8d 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala +++ b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala @@ -6,25 +6,22 @@ package edu.ie3.simona.model.participant.load +import edu.ie3.datamodel.models.profile.StandardLoadProfile import 
edu.ie3.simona.model.participant.control.QControl -import edu.ie3.simona.model.participant.load.LoadReference.{ - ActivePower, - EnergyConsumption -} import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel import edu.ie3.simona.model.participant.load.random.RandomLoadModel import edu.ie3.simona.test.common.UnitSpec import edu.ie3.simona.test.common.input.LoadInputTestData -import edu.ie3.simona.util.ConfigUtil -import edu.ie3.util.quantities.PowerSystemUnits.{KILOWATTHOUR, MEGAVOLTAMPERE} +import edu.ie3.util.quantities.PowerSystemUnits.KILOWATTHOUR import edu.ie3.util.quantities.{PowerSystemUnits, QuantityUtil} -import javax.measure.Quantity -import javax.measure.quantity.Power import org.scalatest.PrivateMethodTester import org.scalatest.prop.TableDrivenPropertyChecks import tech.units.indriya.quantity.Quantities import tech.units.indriya.unit.Units.WATT +import javax.measure.Quantity +import javax.measure.quantity.Power + class LoadModelSpec extends UnitSpec with LoadInputTestData @@ -85,7 +82,8 @@ class LoadModelSpec quantityTolerance ) shouldBe true cosPhiRated shouldBe loadInput.getCosPhiRated - loadProfile shouldBe loadInput.getStandardLoadProfile + loadProfile shouldBe loadInput.getLoadProfile + .asInstanceOf[StandardLoadProfile] reference shouldBe foreSeenReference } } diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala index a3c8d34676..2702c1b0b4 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala +++ b/src/test/scala/edu/ie3/simona/model/participant/load/LoadProfileStoreSpec.scala @@ -12,8 +12,8 @@ import java.time.temporal.ChronoUnit import breeze.numerics.abs import com.typesafe.scalalogging.LazyLogging -import edu.ie3.datamodel.models.BdewLoadProfile._ -import edu.ie3.datamodel.models.{BdewLoadProfile, StandardLoadProfile} +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile._ +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.load.profile.{ LoadProfileKey, LoadProfileStore, @@ -113,10 +113,10 @@ class LoadProfileStoreSpec /* List the expected annual energy consumption */ val expectedEnergyConsumption : Map[StandardLoadProfile, ComparableQuantity[Energy]] = Map( - BdewLoadProfile.H0 -> Quantities.getQuantity(1000d, KILOWATTHOUR), - BdewLoadProfile.L0 -> Quantities.getQuantity(1002d, KILOWATTHOUR), + H0 -> Quantities.getQuantity(1000d, KILOWATTHOUR), + L0 -> Quantities.getQuantity(1002d, KILOWATTHOUR), /* TODO: Check, if this is correct */ - BdewLoadProfile.G0 -> Quantities.getQuantity(1022d, KILOWATTHOUR) + G0 -> Quantities.getQuantity(1022d, KILOWATTHOUR) ) /* Collect all available time steps in 2020 */ diff --git a/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala b/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala index 139e1a577f..b99c09fc8f 100644 --- a/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala +++ b/src/test/scala/edu/ie3/simona/test/common/input/LoadInputTestData.scala @@ -7,11 +7,11 @@ package edu.ie3.simona.test.common.input import java.util.UUID - import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.LoadInput import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed -import edu.ie3.datamodel.models.{BdewLoadProfile, OperationTime} +import edu.ie3.datamodel.models.OperationTime +import 
edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.util.quantities.PowerSystemUnits.{KILOWATTHOUR, VOLTAMPERE} import tech.units.indriya.quantity.Quantities @@ -29,7 +29,7 @@ trait LoadInputTestData extends NodeInputTestData { OperationTime.notLimited(), nodeInputNoSlackNs04KvA, new CosPhiFixed("cosPhiFixed:{(0.0,0.95)}"), - BdewLoadProfile.H0, + BdewStandardLoadProfile.H0, false, Quantities.getQuantity(3000d, KILOWATTHOUR), Quantities.getQuantity(282.74d, VOLTAMPERE), From 49f38a62974a9b5c2aa730ba923a65c05f494f20 Mon Sep 17 00:00:00 2001 From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com> Date: Tue, 7 Jun 2022 10:04:14 +0200 Subject: [PATCH 45/58] update of readme --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index cb2233bbdb..a61d0d3e29 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,7 @@ CONTRIBUTING.md file in the root directory of this repository. For all SIMONA related questions please feel free to contact people involved in the development and maintenance of SIMONA. For the moment, these people are: +- Feismann, Daniel - [daniel.feismann@tu-dortmund.de](mailto:daniel.feismann@tu-dortmund.de) - Peter, Sebastian - [sebastian.peter@tu-dortmund.de](mailto:sebastian.peter@tu-dortmund.de) - Oberließen, Thomas - [thomas.oberliessen@tu-dortmund.de](mailto:thomas.oberliessen@tu-dortmund.de) - Sen Sarma, Debopama - [debopama-sen.sarma@tu-dortmund.de](mailto:debopama-sen.sarma@tu-dortmund.de) From aa8f6911294e21bf3e5b1aeac971a0794280ce04 Mon Sep 17 00:00:00 2001 From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com> Date: Tue, 7 Jun 2022 10:21:48 +0200 Subject: [PATCH 46/58] adapt loadprofile parsing --- .../simona/model/participant/load/profile/LoadProfileKey.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala index d008f64754..4e5f7828a7 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala @@ -68,7 +68,7 @@ case object LoadProfileKey { ): LoadProfileKey = { try { new LoadProfileKey( - LoadProfile.parse(loadProfile).asInstanceOf[StandardLoadProfile], + StandardLoadProfile.parse(loadProfile), Season(season), DayType(dayType) ) From 4939faad3026320637538b43d5c69d660647d930 Mon Sep 17 00:00:00 2001 From: Daniel Feismann <98817556+danielfeismann@users.noreply.github.com> Date: Tue, 7 Jun 2022 10:32:15 +0200 Subject: [PATCH 47/58] changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e9c3f96807..5f2a30dfac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed genConfigSample gradle task[#148](https://github.com/ie3-institute/simona/issues/148) - Fixed some unreachable code [#167](https://github.com/ie3-institute/simona/issues/167) - Fix treatment of non-InitializeTrigger triggers in initialization within SimScheduler [#237](https://github.com/ie3-institute/simona/issues/237) +- Fix breaking SIMONA caused by introducing temperature dependant load profiles in PSDM [#255](https://github.com/ie3-institute/simona/issues/255) ### Removed - Remove workaround for tscfg tmp directory [#178](https://github.com/ie3-institute/simona/issues/178) 
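Taken together, the preceding load-profile patches move from edu.ie3.datamodel.models.BdewLoadProfile to the new profile package and parse profile keys via StandardLoadProfile.parse instead of casting the result of LoadProfile.parse. A minimal standalone sketch of the migrated parsing path (not part of any patch; it assumes a PSDM version that already ships the profile package):

    import edu.ie3.datamodel.models.profile.{
      BdewStandardLoadProfile,
      StandardLoadProfile
    }

    // "h0", as used in the sample load_input.csv, resolves to the BDEW
    // residential profile; keys that do not denote a standard load profile
    // end in a ParsingException.
    val profile: StandardLoadProfile = StandardLoadProfile.parse("h0")
    assert(profile == BdewStandardLoadProfile.H0)
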
From 50f4cc38f5c81b53cfffc9d4ddc439d43083cfa0 Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 10:48:02 +0200 Subject: [PATCH 48/58] Adapting load input to changes --- input/samples/vn_simona/fullGrid/load_input.csv | 2 +- .../ie3/simona/integration/common/IntegrationSpecCommon.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/input/samples/vn_simona/fullGrid/load_input.csv b/input/samples/vn_simona/fullGrid/load_input.csv index 8e0e670c24..1b513a9556 100644 --- a/input/samples/vn_simona/fullGrid/load_input.csv +++ b/input/samples/vn_simona/fullGrid/load_input.csv @@ -1,4 +1,4 @@ -"uuid","cos_phi_rated","dsm","e_cons_annual","id","node","operates_from","operates_until","operator","q_characteristics","s_rated","standard_load_profile" +"uuid","cos_phi_rated","dsm","e_cons_annual","id","node","operates_from","operates_until","operator","q_characteristics","s_rated","load_profile" c2402412-97fa-4ca4-aa66-e6e04d010001,0.9700000286102295,false,4000.0,NS_NET126_L_F1_(36),ca3391eb-ca94-4945-ac72-e116f396f82c,,,,cosPhiFixed:{(0.00,1.00)},2.0618600845336914,h0 fa8ef266-5b15-4fdd-a145-71ba95e3463d,0.949999988079071,false,4000.0,NS_NET146_L_F3_(17),0f3ba59d-a9ce-4669-aa12-bebec42238b7,,,,cosPhiFixed:{(0.00,1.00)},2.3157899379730225,h0 4dd0785a-482c-47e3-bb82-e315083684d1,0.9700000286102295,false,4000.0,NS_NET116_L_S3_2(6),550ebca7-1455-44eb-9431-ffbf08e58bd4,,,,cosPhiFixed:{(0.00,1.00)},4.1237101554870605,h0 diff --git a/src/test/scala/edu/ie3/simona/integration/common/IntegrationSpecCommon.scala b/src/test/scala/edu/ie3/simona/integration/common/IntegrationSpecCommon.scala index 8b1fe009e9..eeb0804f77 100644 --- a/src/test/scala/edu/ie3/simona/integration/common/IntegrationSpecCommon.scala +++ b/src/test/scala/edu/ie3/simona/integration/common/IntegrationSpecCommon.scala @@ -12,7 +12,7 @@ trait IntegrationSpecCommon { * or some of your tests are failing you very likely have altered the vn_simona.conf. This config although * is NOT meant to be altered. Instead you should always use a delta config and only override the values and * files of vn_simona/vn_simona.conf. Delta configs can be created by including the config you want to change - * parameters from via include (e.g. include "input/vn_simona/vn_simona.conf") at the + * parameters from via include (e.g. include "input/samples/vn_simona/vn_simona.conf") at the * beginning of your config file and then just override the parameters you want to change! 
*/ val configFile: String = "input/samples/vn_simona/vn_simona.conf" From e6e5c49696308a85ff5b6239e21d51412d07ce12 Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 11:03:15 +0200 Subject: [PATCH 49/58] Remove workaround --- .../edu/ie3/simona/model/participant/load/LoadModelSpec.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala index b2ea0bec8d..b5e30470c5 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala +++ b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala @@ -6,7 +6,6 @@ package edu.ie3.simona.model.participant.load -import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.control.QControl import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel import edu.ie3.simona.model.participant.load.random.RandomLoadModel @@ -83,7 +82,6 @@ class LoadModelSpec ) shouldBe true cosPhiRated shouldBe loadInput.getCosPhiRated loadProfile shouldBe loadInput.getLoadProfile - .asInstanceOf[StandardLoadProfile] reference shouldBe foreSeenReference } } From 01c6a42ae8b8c9491f091049b54b4699d1534445 Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 11:07:55 +0200 Subject: [PATCH 50/58] Remove workaround --- .../edu/ie3/simona/model/participant/load/LoadModelSpec.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala index b2ea0bec8d..b5e30470c5 100644 --- a/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala +++ b/src/test/scala/edu/ie3/simona/model/participant/load/LoadModelSpec.scala @@ -6,7 +6,6 @@ package edu.ie3.simona.model.participant.load -import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.control.QControl import edu.ie3.simona.model.participant.load.profile.ProfileLoadModel import edu.ie3.simona.model.participant.load.random.RandomLoadModel @@ -83,7 +82,6 @@ class LoadModelSpec ) shouldBe true cosPhiRated shouldBe loadInput.getCosPhiRated loadProfile shouldBe loadInput.getLoadProfile - .asInstanceOf[StandardLoadProfile] reference shouldBe foreSeenReference } } From 1e459a720e71463edc7f2839e656380571b2561b Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 11:13:24 +0200 Subject: [PATCH 51/58] Adapting load input to changes in PSDM --- input/samples/vn_simona/fullGrid/load_input.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/input/samples/vn_simona/fullGrid/load_input.csv b/input/samples/vn_simona/fullGrid/load_input.csv index 8e0e670c24..1b513a9556 100644 --- a/input/samples/vn_simona/fullGrid/load_input.csv +++ b/input/samples/vn_simona/fullGrid/load_input.csv @@ -1,4 +1,4 @@ -"uuid","cos_phi_rated","dsm","e_cons_annual","id","node","operates_from","operates_until","operator","q_characteristics","s_rated","standard_load_profile" +"uuid","cos_phi_rated","dsm","e_cons_annual","id","node","operates_from","operates_until","operator","q_characteristics","s_rated","load_profile" c2402412-97fa-4ca4-aa66-e6e04d010001,0.9700000286102295,false,4000.0,NS_NET126_L_F1_(36),ca3391eb-ca94-4945-ac72-e116f396f82c,,,,cosPhiFixed:{(0.00,1.00)},2.0618600845336914,h0 
fa8ef266-5b15-4fdd-a145-71ba95e3463d,0.949999988079071,false,4000.0,NS_NET146_L_F3_(17),0f3ba59d-a9ce-4669-aa12-bebec42238b7,,,,cosPhiFixed:{(0.00,1.00)},2.3157899379730225,h0 4dd0785a-482c-47e3-bb82-e315083684d1,0.9700000286102295,false,4000.0,NS_NET116_L_S3_2(6),550ebca7-1455-44eb-9431-ffbf08e58bd4,,,,cosPhiFixed:{(0.00,1.00)},4.1237101554870605,h0 From f3983fb2e45f2f4a2c2127139a09ed4ffbdc3beb Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 13:48:36 +0200 Subject: [PATCH 52/58] Addressing Thomas' comments --- .../event/listener/ResultEventListener.scala | 17 ++++++---- .../simona/io/result/plain/PlainResult.scala | 23 +++++++++++++ .../simona/io/result/plain/PlainWriter.scala | 34 +++++++++++++++++-- .../load/profile/LoadProfileKey.scala | 5 +-- .../io/result/ResultEntityKafkaSpec.scala | 3 +- 5 files changed, 69 insertions(+), 13 deletions(-) diff --git a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala index 43b59cf1eb..5f908d31c3 100644 --- a/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala +++ b/src/main/scala/edu/ie3/simona/event/listener/ResultEventListener.scala @@ -180,12 +180,17 @@ class ResultEventListener( override def preStart(): Unit = { log.debug("Starting initialization!") - log.debug( - s"Events that will be processed: {}", - resultFileHierarchy.resultEntitiesToConsider - .map(_.getSimpleName) - .mkString(",") - ) + resultFileHierarchy.resultSinkType match { + case _: ResultSinkType.Kafka => + log.debug("NodeResults will be processed by a Kafka sink.") + case _ => + log.debug( + s"Events that will be processed: {}", + resultFileHierarchy.resultEntitiesToConsider + .map(_.getSimpleName) + .mkString(",") + ) + } self ! Init } diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala index f9bd3489b0..a590441007 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainResult.scala @@ -8,9 +8,32 @@ package edu.ie3.simona.io.result.plain import java.util.UUID +/** Results that are sent out with Kafka and avro should use this trait and + * corresponding implementing classes, since these give more control over + * attribute types and naming and they include sim run id. Plain result objects + * can be created by [[PlainWriter]]. + */ sealed trait PlainResult object PlainResult { + + /** Plain result class for [[edu.ie3.datamodel.models.result.NodeResult]]. 
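+    * Attribute types are deliberately plain (a formatted String time stamp
+    * and Double values instead of quantity objects), which keeps the Avro
+    * record schema derived from this case class simple and stable.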
+ * + * @param simRunId + * the simulation run id + * @param time + * the current time, formatted by [[PlainWriter.createSimpleTimeStamp]] + * @param uuid + * the uuid identifying this result event + * @param inputModel + * the uuid of the model that created this event + * @param vMag + * the voltage magnitude as a [[Double]] in + * [[edu.ie3.util.quantities.PowerSystemUnits#PU]] + * @param vAng + * the voltage angle as a [[Double]] in + * [[edu.ie3.util.quantities.PowerSystemUnits#DEGREE_GEOM]] + */ final case class PlainNodeResult( simRunId: UUID, time: String, diff --git a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala index d9b5ca1d67..993f43d0c9 100644 --- a/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala +++ b/src/main/scala/edu/ie3/simona/io/result/plain/PlainWriter.scala @@ -11,20 +11,47 @@ import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult import edu.ie3.util.quantities.PowerSystemUnits import tech.units.indriya.quantity.Quantities -import java.time.ZonedDateTime import java.time.format.DateTimeFormatter +import java.time.{ZoneId, ZonedDateTime} import java.util.UUID +/** Converts a [[ResultEntity]] into a [[PlainResult]] and vice versa. + * @tparam F + * the type of [[ResultEntity]] + * @tparam P + * the type of [[PlainResult]] + */ sealed trait PlainWriter[F <: ResultEntity, P <: PlainResult] { + + /** Converts a regular [[ResultEntity]] of type [[F]] into a [[PlainResult]] + * of type [[P]] + * @param full + * the [[ResultEntity]] to convert + * @return + * the resulting [[PlainResult]] + */ def writePlain(full: F): P + /** Converts a [[PlainResult]] of type [[P]] into a regular [[ResultEntity]] + * of type [[F]] + * @param plain + * the [[PlainResult]] to convert + * @return + * the resulting [[ResultEntity]] + */ def createFull(plain: P): F } object PlainWriter { private lazy val timeFormatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") + DateTimeFormatter + .ofPattern("yyyy-MM-dd HH:mm:ss") + .withZone(ZoneId.of("UTC")) + /** Converts [[NodeResult]]s into [[PlainNodeResult]]s and vice versa + * @param simRunId + * the simulation run id to use for plain results + */ final case class NodeResultWriter(simRunId: UUID) extends PlainWriter[NodeResult, PlainNodeResult] { @@ -41,8 +68,9 @@ object PlainWriter { override def createFull(plain: PlainNodeResult): NodeResult = { new NodeResult( - ZonedDateTime.parse(plain.time), plain.uuid, + ZonedDateTime.parse(plain.time, timeFormatter), + plain.inputModel, Quantities.getQuantity(plain.vMag, PowerSystemUnits.PU), Quantities.getQuantity(plain.vAng, PowerSystemUnits.DEGREE_GEOM) ) diff --git a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala index d008f64754..8cb646dbec 100644 --- a/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala +++ b/src/main/scala/edu/ie3/simona/model/participant/load/profile/LoadProfileKey.scala @@ -7,8 +7,9 @@ package edu.ie3.simona.model.participant.load.profile import java.time.ZonedDateTime + import edu.ie3.datamodel.exceptions.ParsingException -import edu.ie3.datamodel.models.profile.{LoadProfile, StandardLoadProfile} +import edu.ie3.datamodel.models.profile.StandardLoadProfile import edu.ie3.simona.model.participant.load import edu.ie3.simona.model.participant.load.{DayType, profile} @@ -68,7 +69,7 @@ case object LoadProfileKey { ): 
LoadProfileKey = { try { new LoadProfileKey( - LoadProfile.parse(loadProfile).asInstanceOf[StandardLoadProfile], + StandardLoadProfile.parse(loadProfile), Season(season), DayType(dayType) ) diff --git a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala index 69052af3a1..3f22986085 100644 --- a/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala +++ b/src/test/scala/edu/ie3/simona/io/result/ResultEntityKafkaSpec.scala @@ -17,7 +17,7 @@ import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult import edu.ie3.simona.io.result.plain.PlainWriter import edu.ie3.simona.test.KafkaSpecLike import edu.ie3.simona.test.KafkaSpecLike.Topic -import edu.ie3.simona.test.common.{TestKitWithShutdown, UnitSpec} +import edu.ie3.simona.test.common.TestKitWithShutdown import edu.ie3.simona.util.ResultFileHierarchy import edu.ie3.util.quantities.PowerSystemUnits import edu.ie3.util.scala.io.ScalaReflectionSerde @@ -52,7 +52,6 @@ class ResultEntityKafkaSpec ) ) ) - with UnitSpec with KafkaSpecLike with GivenWhenThen with Eventually { From b0e89a5980c5e587488449b1cfa56542e5845721 Mon Sep 17 00:00:00 2001 From: Sebastian Peter Date: Wed, 8 Jun 2022 13:48:51 +0200 Subject: [PATCH 53/58] Providing a test for PlainWriter --- .../io/result/plain/PlainWriterSpec.scala | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 src/test/scala/edu/ie3/simona/io/result/plain/PlainWriterSpec.scala diff --git a/src/test/scala/edu/ie3/simona/io/result/plain/PlainWriterSpec.scala b/src/test/scala/edu/ie3/simona/io/result/plain/PlainWriterSpec.scala new file mode 100644 index 0000000000..ab3c7a8390 --- /dev/null +++ b/src/test/scala/edu/ie3/simona/io/result/plain/PlainWriterSpec.scala @@ -0,0 +1,98 @@ +/* + * © 2022. 
TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+
+package edu.ie3.simona.io.result.plain
+
+import edu.ie3.datamodel.models.result.NodeResult
+import edu.ie3.simona.io.result.plain.PlainResult.PlainNodeResult
+import edu.ie3.simona.io.result.plain.PlainWriter.NodeResultWriter
+import edu.ie3.simona.test.common.UnitSpec
+import edu.ie3.util.TimeUtil
+import edu.ie3.util.quantities.PowerSystemUnits
+import org.scalatest.GivenWhenThen
+import tech.units.indriya.quantity.Quantities
+
+import java.time.ZoneId
+import java.time.format.DateTimeFormatter
+import java.util.UUID
+
+class PlainWriterSpec extends UnitSpec with GivenWhenThen {
+
+  "A NodeResultWriter" should {
+    val simRunId = UUID.randomUUID()
+    val plainWriter = NodeResultWriter(simRunId)
+
+    val timeFormatter =
+      DateTimeFormatter
+        .ofPattern("yyyy-MM-dd HH:mm:ss")
+        .withZone(ZoneId.of("UTC"))
+
+    "write a plain result correctly" in {
+      Given("a full NodeResult")
+      val eventId = UUID.randomUUID()
+      val time = TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00")
+      val inputModelId = UUID.randomUUID()
+      val vMag = Quantities.getQuantity(0.85d, PowerSystemUnits.PU)
+      val vAng = Quantities.getQuantity(90d, PowerSystemUnits.DEGREE_GEOM)
+
+      val nodeResultFull = new NodeResult(
+        eventId,
+        time,
+        inputModelId,
+        vMag,
+        vAng
+      )
+
+      When("converting to a plain result")
+      val plainResult = plainWriter.writePlain(nodeResultFull)
+
+      Then("the plain result is correct")
+      plainResult.uuid shouldBe eventId
+      plainResult.time shouldBe time.format(timeFormatter)
+      plainResult.inputModel shouldBe inputModelId
+      plainResult.vMag shouldBe vMag
+        .to(PowerSystemUnits.PU)
+        .getValue
+        .doubleValue()
+      plainResult.vAng shouldBe vAng
+        .to(PowerSystemUnits.DEGREE_GEOM)
+        .getValue
+        .doubleValue()
+    }
+
+    "create a full result correctly" in {
+      Given("a plain NodeResult")
+      val eventId = UUID.randomUUID()
+      val time = "2020-01-01 00:00:00"
+      val inputModelId = UUID.randomUUID()
+      val vMag = 0.85d
+      val vAng = 90d
+
+      val nodeResultPlain = PlainNodeResult(
+        simRunId,
+        time,
+        eventId,
+        inputModelId,
+        vMag,
+        vAng
+      )
+
+      When("converting to a full NodeResult")
+      val fullResult = plainWriter.createFull(nodeResultPlain)
+
+      Then("the full result is correct")
+      fullResult.getUuid shouldBe eventId
+      fullResult.getTime shouldBe TimeUtil.withDefaults.toZonedDateTime(time)
+      fullResult.getInputModel shouldBe inputModelId
+      fullResult
+        .getvMag() shouldBe Quantities.getQuantity(vMag, PowerSystemUnits.PU)
+      fullResult.getvAng() shouldBe Quantities.getQuantity(
+        vAng,
+        PowerSystemUnits.DEGREE_GEOM
+      )
+    }
+  }
+}

From 24a52a8379fa098ced63dab11ea021a1860feb4c Mon Sep 17 00:00:00 2001
From: Sebastian Peter
Date: Wed, 8 Jun 2022 14:07:20 +0200
Subject: [PATCH 54/58] Fixing ResultEventListenerSpec

---
 .../event/listener/ResultEventListenerSpec.scala | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala
index 49d1743d10..efef169a2e 100644
--- a/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala
+++ b/src/test/scala/edu/ie3/simona/event/listener/ResultEventListenerSpec.scala
@@ -87,19 +87,16 @@ class ResultEventListenerSpec
     )
 
     def createDir(
-        resultFileHierarchy: ResultFileHierarchy,
-        resultEntitiesToBeWritten: Set[Class[_ <:
ResultEntity]] - ): Iterable[Future[(Class[_], ResultEntitySink)]] = { + resultFileHierarchy: ResultFileHierarchy + ): Iterable[Future[ResultEntitySink]] = { val materializer: Materializer = Materializer(system) - val initializeSinks - : PrivateMethod[Iterable[Future[(Class[_], ResultEntitySink)]]] = - PrivateMethod[Iterable[Future[(Class[_], ResultEntitySink)]]]( + val initializeSinks: PrivateMethod[Iterable[Future[ResultEntitySink]]] = + PrivateMethod[Iterable[Future[ResultEntitySink]]]( Symbol("initializeSinks") ) ResultEventListener invokePrivate initializeSinks( - resultEntitiesToBeWritten, resultFileHierarchy, materializer ) @@ -123,7 +120,7 @@ class ResultEventListenerSpec "initialize its sinks correctly" in { val fileHierarchy = resultFileHierarchy(1, ".csv") Await.ready( - Future.sequence(createDir(fileHierarchy, resultEntitiesToBeWritten)), + Future.sequence(createDir(fileHierarchy)), 60 seconds ) From 1bba5c689e9d2d7f1c7d1e07f1ec237e13363d4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Jun 2022 14:08:45 +0000 Subject: [PATCH 55/58] Bump com.diffplug.spotless from 6.6.1 to 6.7.0 (#252) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index b0ae0f9771..38e163d654 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ plugins { id 'signing' id 'maven-publish' // publish to a maven repo (local or mvn central, has to be defined) id 'pmd' // code check, working on source code - id 'com.diffplug.spotless' version '6.6.1'// code format + id 'com.diffplug.spotless' version '6.7.0'// code format id 'com.github.onslip.gradle-one-jar' version '1.0.6' // pack a self contained jar id "com.github.ben-manes.versions" version '0.42.0' id "de.undercouch.download" version "5.1.0" // downloads plugin From 424ac660b2495e92b2cbb611aa3bac2de5478693 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Jun 2022 12:35:25 +0000 Subject: [PATCH 56/58] Bump org.sonarqube from 3.3 to 3.4.0.2513 (#259) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 38e163d654..0df62cc60a 100644 --- a/build.gradle +++ b/build.gradle @@ -13,7 +13,7 @@ plugins { id "de.undercouch.download" version "5.1.0" // downloads plugin id "kr.motd.sphinx" version "2.10.1" // documentation generation id "com.github.johnrengelman.shadow" version "7.1.2" // fat jar - id "org.sonarqube" version "3.3" // sonarqube + id "org.sonarqube" version "3.4.0.2513" // sonarqube id "org.scoverage" version "7.0.0" // scala code coverage scoverage id "com.github.maiflai.scalatest" version "0.32" // run scalatest without specific spec task id 'org.hidetake.ssh' version '2.10.1' From f66d8382a2e3ef62d4cc6d64c898e4e90ed9b559 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 08:30:48 +0000 Subject: [PATCH 57/58] Bump com.diffplug.spotless from 6.7.0 to 6.7.2 (#260) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 0df62cc60a..605fa4231a 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ plugins { id 'signing' id 'maven-publish' // publish to a maven repo (local or mvn central, has to be defined) id 'pmd' // code check, working on source code - id 'com.diffplug.spotless' version '6.7.0'// code format + id 'com.diffplug.spotless' version '6.7.2'// 
code format id 'com.github.onslip.gradle-one-jar' version '1.0.6' // pack a self contained jar id "com.github.ben-manes.versions" version '0.42.0' id "de.undercouch.download" version "5.1.0" // downloads plugin From 259d20911ef2b2a04cd58dea53b19afb3d2963ff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jun 2022 11:23:06 +0000 Subject: [PATCH 58/58] Bump avro4s-core_2.13 from 4.0.13 to 4.1.0 (#263) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 7eb6d589ac..f4795529e8 100644 --- a/build.gradle +++ b/build.gradle @@ -146,7 +146,7 @@ dependencies { /* Kafka */ implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.2.0' implementation 'io.confluent:kafka-streams-avro-serde:7.1.1' - implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.0.13" + implementation "com.sksamuel.avro4s:avro4s-core_${scalaVersion}:4.1.0" implementation 'org.apache.commons:commons-math3:3.6.1' // apache commons math3 implementation 'org.apache.poi:poi-ooxml:5.2.2' // used for FilenameUtils
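For reference: a minimal sketch of the round trip these patches enable, converting a NodeResult to its plain Kafka representation and back. It mirrors PlainWriterSpec above and is not part of the patch series itself; the object name and the sample values are made up for illustration.

import java.util.UUID

import edu.ie3.datamodel.models.result.NodeResult
import edu.ie3.simona.io.result.plain.PlainWriter.NodeResultWriter
import edu.ie3.util.TimeUtil
import edu.ie3.util.quantities.PowerSystemUnits
import tech.units.indriya.quantity.Quantities

object PlainWriterRoundTrip extends App {
  // run id as configured via simona.output.sink.kafka.runId; random here
  val writer = NodeResultWriter(UUID.randomUUID())

  val full = new NodeResult(
    UUID.randomUUID(), // result uuid
    TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00"),
    UUID.randomUUID(), // input model uuid
    Quantities.getQuantity(0.95d, PowerSystemUnits.PU),
    Quantities.getQuantity(45d, PowerSystemUnits.DEGREE_GEOM)
  )

  // the plain form carries only the run id, a UTC-formatted time string,
  // UUIDs and doubles, i.e. the shape the Kafka sink serializes with avro4s
  val plain = writer.writePlain(full)

  // createFull parses the time string with the same UTC formatter,
  // so the original result is restored
  val restored = writer.createFull(plain)
  assert(restored.getTime == full.getTime)
}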