From 652de082026d0ee7ace355e6aeabf53254f50758 Mon Sep 17 00:00:00 2001
From: MaxEmerold <wiedenhoeft.max@gmail.com>
Date: Tue, 7 Dec 2021 10:48:30 +0100
Subject: [PATCH] Provide basic implementations for Hazelcast Jet

---
 .../groovy/theodolite.hazelcastjet.gradle     |  34 ++
 .../resources/jobmanager-deployment.yaml      |   2 +-
 .../resources/taskmanager-deployment.yaml     |   2 +-
 .../uc1-hazelcastjet-deployment.yaml          |  62 +++
 .../uc1-hazelcastjet-service.yaml             |  16 +
 .../uc1-load-generator-deployment.yaml        |  32 ++
 .../uc1-load-generator-service.yaml           |  17 +
 .../resources/jobmanager-deployment.yaml      |   2 +-
 .../resources/taskmanager-deployment.yaml     |   2 +-
 .../uc2-benchmark-operator.yaml               |  37 ++
 .../uc2-benchmark-standalone.yaml             |  34 ++
 .../uc2-hazelcastjet-deployment.yaml          |  66 +++
 .../uc2-hazelcastjet-service.yaml             |  16 +
 .../uc2-load-generator-deployment.yaml        |  32 ++
 .../uc2-load-generator-service.yaml           |  16 +
 .../resources/jobmanager-deployment.yaml      |   2 +-
 .../resources/taskmanager-deployment.yaml     |   2 +-
 .../uc3-hazelcastjet-deployment.yaml          |  68 +++
 .../uc3-hazelcastjet-service.yaml             |  16 +
 .../uc3-load-generator-deployment.yaml        |  32 ++
 .../uc3-load-generator-service.yaml           |  16 +
 .../resources/jobmanager-deployment.yaml      |   2 +-
 .../resources/taskmanager-deployment.yaml     |   2 +-
 .../uc4-hazelcastjet-deployment.yaml          |  66 +++
 .../uc4-hazelcastjet-service.yaml             |  16 +
 .../uc4-load-generator-deployment.yaml        |  34 ++
 .../uc4-load-generator-service.yaml           |  16 +
 .../docker-compose.yml                        |  73 ++++
 .../docker-compose.yml                        |  66 +++
 .../docker-compose.yml                        |  68 +++
 .../docker-compose.yml                        |  57 +++
 .../hazelcastjet-commons/build.gradle         |  24 ++
 .../hazelcastjet/BenchmarkConfigBuilder.java  |  64 +++
 .../commons/hazelcastjet/ClusterConfig.java   |  76 ++++
 .../hazelcastjet/ConfigurationKeys.java       |  30 ++
 .../hazelcastjet/JetInstanceBuilder.java      |  60 +++
 theodolite-benchmarks/settings.gradle         |   5 +
 .../.settings/org.eclipse.jdt.ui.prefs        | 127 ++++++
 .../qa.eclipse.plugin.checkstyle.prefs        |   4 +
 .../.settings/qa.eclipse.plugin.pmd.prefs     |   4 +
 .../uc1-hazelcastjet/Dockerfile               |   7 +
 .../uc1-hazelcastjet/build.gradle             |   6 +
 .../uc1/application/HistoryService.java       |  65 +++
 .../application/Uc1HazelcastJetFactory.java   | 172 ++++++++
 .../Uc1KafkaPropertiesBuilder.java            |  43 ++
 .../uc1/application/Uc1PipelineBuilder.java   |  61 +++
 .../resources/META-INF/application.properties |   8 +
 .../uc1/application/Uc1PipelineTest.java      |  94 +++++
 .../uc2-hazelcastjet/Dockerfile               |   7 +
 .../uc2-hazelcastjet/build.gradle             |   6 +
 .../uc2/application/HistoryService.java       |  67 +++
 .../application/Uc2HazelcastJetFactory.java   | 313 ++++++++++++++
 .../Uc2KafkaPropertiesBuilder.java            |  64 +++
 .../uc2/application/Uc2PipelineBuilder.java   |  63 +++
 .../uc2/applicationold/ClusterConfig.java     |  76 ++++
 .../uc2/applicationold/ConfigurationKeys.java |  44 ++
 .../uc2/applicationold/HistoryService.java    | 264 ++++++++++++
 .../resources/META-INF/application.properties |   8 +
 .../uc3-hazelcastjet/Dockerfile               |   7 +
 .../uc3-hazelcastjet/build.gradle             |   6 +
 .../uc3/application/HistoryService.java       |  71 ++++
 .../application/Uc3HazelcastJetFactory.java   | 358 ++++++++++++++++
 .../Uc3KafkaPropertiesBuilder.java            |  64 +++
 .../uc3/application/Uc3PipelineBuilder.java   |  94 +++++
 .../uc3specifics/HourOfDayKey.java            |  47 +++
 .../uc3specifics/HourOfDayKeySerializer.java  |  29 ++
 .../uc3specifics/HoursOfDayKeyFactory.java    |  19 +
 .../uc3specifics/StatsKeyFactory.java         |  17 +
 .../uc3/applicationold/ClusterConfig.java     |  76 ++++
 .../uc3/applicationold/ConfigurationKeys.java |  48 +++
 .../uc3/applicationold/HistoryService.java    | 303 ++++++++++++++
 .../resources/META-INF/application.properties |   8 +
 .../uc4-hazelcastjet/Dockerfile               |   7 +
 .../uc4-hazelcastjet/build.gradle             |   6 +
 .../uc4/application/HistoryService.java       |  68 +++
 .../application/Uc4HazelcastJetFactory.java   | 386 ++++++++++++++++++
 .../Uc4KafkaPropertiesBuilder.java            | 124 ++++++
 .../uc4/application/Uc4PipelineBuilder.java   | 180 ++++++++
 .../application/Uc4PipelineBuilderNew.java    | 278 +++++++++++++
 .../uc4specifics/ChildParentsTransformer.java | 111 +++++
 .../uc4specifics/EventDeserializer.java       |  33 ++
 .../uc4specifics/HashMapSupplier.java         |  23 ++
 .../uc4specifics/SensorGroupKey.java          |  50 +++
 .../SensorGroupKeySerializer.java             |  30 ++
 .../application/uc4specifics/ValueGroup.java  |  54 +++
 .../uc4specifics/ValueGroupSerializer.java    |  32 ++
 .../resources/META-INF/application.properties |   8 +
 87 files changed, 5197 insertions(+), 8 deletions(-)
 create mode 100644 theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
 create mode 100644 theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-standalone.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-service.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-deployment.yaml
 create mode 100644 theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-service.yaml
 create mode 100644 theodolite-benchmarks/docker-test/uc1-hazelcastjet-docker-compose/docker-compose.yml
 create mode 100644 theodolite-benchmarks/docker-test/uc2-hazelcastjet-docker-compose/docker-compose.yml
 create mode 100644 theodolite-benchmarks/docker-test/uc3-hazelcastjet-docker-compose/docker-compose.yml
 create mode 100644 theodolite-benchmarks/docker-test/uc4-hazelcastjet-docker-compose/docker-compose.yml
 create mode 100644 theodolite-benchmarks/hazelcastjet-commons/build.gradle
 create mode 100644 theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/BenchmarkConfigBuilder.java
 create mode 100644 theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ClusterConfig.java
 create mode 100644 theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ConfigurationKeys.java
 create mode 100644 theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/JetInstanceBuilder.java
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/build.gradle
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1HazelcastJetFactory.java
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1KafkaPropertiesBuilder.java
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1PipelineBuilder.java
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
 create mode 100644 theodolite-benchmarks/uc1-hazelcastjet/src/test/java/theodolite/uc1/application/Uc1PipelineTest.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/build.gradle
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2HazelcastJetFactory.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2KafkaPropertiesBuilder.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2PipelineBuilder.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ClusterConfig.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ConfigurationKeys.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/build.gradle
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3HazelcastJetFactory.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3KafkaPropertiesBuilder.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3PipelineBuilder.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKey.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKeySerializer.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HoursOfDayKeyFactory.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/StatsKeyFactory.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ClusterConfig.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ConfigurationKeys.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/build.gradle
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/HistoryService.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4HazelcastJetFactory.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4KafkaPropertiesBuilder.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilder.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilderNew.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ChildParentsTransformer.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/EventDeserializer.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/HashMapSupplier.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKey.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKeySerializer.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroup.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroupSerializer.java
 create mode 100644 theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties

diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
new file mode 100644
index 000000000..fc916f03d
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
@@ -0,0 +1,34 @@
+plugins {
+  // common java conventions
+  id 'theodolite.java-conventions'
+
+  // make executable
+  id 'application'
+  
+  //id "org.sonarqube" version "3.0"
+}
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+      url 'https://packages.confluent.io/maven/'
+  }
+}
+
+
+dependencies {
+  implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+  implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+  implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
+  implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
+  implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
+  implementation 'io.confluent:kafka-avro-serializer:5.3.0'
+  implementation 'org.slf4j:slf4j-api:1.7.25'
+  implementation 'com.google.code.gson:gson:2.8.2'
+  implementation project(':hazelcastjet-commons')
+  testImplementation 'com.hazelcast.jet:hazelcast-jet-core:4.5:tests'
+  testImplementation 'com.hazelcast:hazelcast:4.2:tests'
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
index 1f328b1cd..43992a6e9 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: jobmanager
-          image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc1-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
index c2266a4ae..6f28fafff 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc1-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-deployment.yaml
new file mode 100644
index 000000000..1fb8caf37
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-deployment.yaml
@@ -0,0 +1,62 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 2
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc1-hazelcastjet
+          imagePullPolicy: "Never"
+          ports:
+            - containerPort: 5555
+              name: jmx
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
+            - name: JAVA_OPTS
+              value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
+        #- name: prometheus-jmx-exporter
+        #  image: "solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143"
+        #  command:
+        #    - java
+        #    - -XX:+UnlockExperimentalVMOptions
+        #    - -XX:+UseCGroupMemoryLimitForHeap
+        #    - -XX:MaxRAMFraction=1
+        #    - -XshowSettings:vm
+        #    - -jar
+        #    - jmx_prometheus_httpserver.jar
+        #    - "5556"
+        #    - /etc/jmx-aggregation/jmx-kafka-prometheus.yml
+        #  ports:
+        #    - containerPort: 5556
+        #  volumeMounts:
+        #    - name: jmx-config
+        #      mountPath: /etc/jmx-aggregation
+      volumes:
+        - name: jmx-config
+          configMap:
+            name: aggregation-jmx-configmap
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-service.yaml
new file mode 100644
index 000000000..845ce7dd5
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-deployment.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-deployment.yaml
new file mode 100644
index 000000000..48532c7da
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-deployment.yaml
@@ -0,0 +1,32 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-load-generator
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-load-generator
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-load-generator
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: workload-generator
+          image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+          ports:
+            - containerPort: 5701
+              name: coordination
+          env:
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-load-generator.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-service.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-service.yaml
new file mode 100644
index 000000000..a50fed409
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-load-generator-service.yaml
@@ -0,0 +1,17 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: titan-ccp-load-generator
+  labels:
+    app: titan-ccp-load-generator
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:
+    app: titan-ccp-load-generator
+  ports:
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
+      
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
index 87ea174f7..3e7a82cb7 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: jobmanager
-          image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc2-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
index c37df972a..3690ed7c4 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc2-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
new file mode 100644
index 000000000..3678655f2
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
@@ -0,0 +1,37 @@
+apiVersion: theodolite.com/v1alpha1
+kind: benchmark
+metadata:
+  name: uc2-hazelcastjet
+appResource:
+  - "uc2-hazelcastjet-deployment.yaml"
+  - "uc2-hazelcastjet-service.yaml"
+  #- "uc2-jmx-configmap.yaml"
+  #- "uc2-service-monitor.yaml"
+loadGenResource:
+  - "uc2-load-generator-deployment.yaml"
+  - "uc2-load-generator-service.yaml"
+resourceTypes:
+  - typeName: "Instances"
+    patchers:
+      - type: "ReplicaPatcher"
+        resource: "uc2-hazelcastjet-deployment.yaml"
+loadTypes:
+  - typeName: "NumSensors"
+    patchers:
+      - type: "EnvVarPatcher"
+        resource: "uc2-load-generator-deployment.yaml"
+        container: "workload-generator"
+        variableName: "NUM_SENSORS"
+      - type: NumSensorsLoadGeneratorReplicaPatcher
+        resource: "uc2-load-generator-deployment.yaml"
+kafkaConfig:
+  bootstrapServer: "theodolite-cp-kafka:9092"
+  topics:
+    - name: "input"
+      numPartitions: 40
+      replicationFactor: 1
+    - name: "output"
+      numPartitions: 40
+      replicationFactor: 1
+    - name: "theodolite-.*"
+      removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-standalone.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-standalone.yaml
new file mode 100644
index 000000000..d1947a369
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-standalone.yaml
@@ -0,0 +1,34 @@
+name: "uc2-hazelcastjet"
+appResource:
+  - "uc2-hazelcastjet-deployment.yaml"
+  - "uc2-hazelcastjet-service.yaml"
+  #- "uc2-jmx-configmap.yaml"
+  #- "uc2-service-monitor.yaml"
+loadGenResource:
+  - "uc2-load-generator-deployment.yaml"
+  - "uc2-load-generator-service.yaml"
+resourceTypes:
+  - typeName: "Instances"
+    patchers:
+      - type: "ReplicaPatcher"
+        resource: "uc2-hazelcastjet-deployment.yaml"
+loadTypes:
+  - typeName: "NumSensors"
+    patchers:
+      - type: "EnvVarPatcher"
+        resource: "uc2-load-generator-deployment.yaml"
+        container: "workload-generator"
+        variableName: "NUM_SENSORS"
+      - type: NumSensorsLoadGeneratorReplicaPatcher
+        resource: "uc2-load-generator-deployment.yaml"
+kafkaConfig:
+  bootstrapServer: "theodolite-cp-kafka:9092"
+  topics:
+    - name: "input"
+      numPartitions: 40
+      replicationFactor: 1
+    - name: "output"
+      numPartitions: 40
+      replicationFactor: 1
+    - name: "theodolite-.*"
+      removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-deployment.yaml
new file mode 100644
index 000000000..a00997736
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-deployment.yaml
@@ -0,0 +1,66 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 2
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc2-hazelcastjet
+          imagePullPolicy: "Never"
+          ports:
+            - containerPort: 5555
+              name: jmx
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
+            - name: JAVA_OPTS
+              value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: DOWNSAMPLE_INTERVAL
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
+        #- name: prometheus-jmx-exporter
+        #  image: "solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143"
+        #  command:
+        #    - java
+        #    - -XX:+UnlockExperimentalVMOptions
+        #    - -XX:+UseCGroupMemoryLimitForHeap
+        #    - -XX:MaxRAMFraction=1
+        #    - -XshowSettings:vm
+        #    - -jar
+        #    - jmx_prometheus_httpserver.jar
+        #    - "5556"
+        #    - /etc/jmx-aggregation/jmx-kafka-prometheus.yml
+        #  ports:
+        #    - containerPort: 5556
+        #  volumeMounts:
+        #    - name: jmx-config
+        #      mountPath: /etc/jmx-aggregation
+      volumes:
+        - name: jmx-config
+          configMap:
+            name: aggregation-jmx-configmap
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-service.yaml
new file mode 100644
index 000000000..845ce7dd5
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-deployment.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-deployment.yaml
new file mode 100644
index 000000000..a937c1813
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-deployment.yaml
@@ -0,0 +1,32 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-load-generator
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-load-generator
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-load-generator
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: workload-generator
+          image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+          ports:
+            - containerPort: 5701
+              name: coordination
+          env:
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-load-generator.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-service.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-service.yaml
new file mode 100644
index 000000000..f8b26b3f6
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-load-generator-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: titan-ccp-load-generator
+  labels:
+    app: titan-ccp-load-generator
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:
+    app: titan-ccp-load-generator
+  ports:
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
index d01123b13..b60701af9 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: jobmanager
-          image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc3-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
index 495f97817..dac409b4c 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc3-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-deployment.yaml
new file mode 100644
index 000000000..2f797dc14
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-deployment.yaml
@@ -0,0 +1,68 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 2
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc3-hazelcastjet
+          imagePullPolicy: "Never"
+          ports:
+            - containerPort: 5555
+              name: jmx
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
+            - name: JAVA_OPTS
+              value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE_IN_SECONDS
+              value: "50"
+            - name: HOPPING_SIZE_IN_SECONDS
+              value: "1"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
+        #- name: prometheus-jmx-exporter
+        #  image: "solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143"
+        #  command:
+        #    - java
+        #    - -XX:+UnlockExperimentalVMOptions
+        #    - -XX:+UseCGroupMemoryLimitForHeap
+        #    - -XX:MaxRAMFraction=1
+        #    - -XshowSettings:vm
+        #    - -jar
+        #    - jmx_prometheus_httpserver.jar
+        #    - "5556"
+        #    - /etc/jmx-aggregation/jmx-kafka-prometheus.yml
+        #  ports:
+        #    - containerPort: 5556
+        #  volumeMounts:
+        #    - name: jmx-config
+        #      mountPath: /etc/jmx-aggregation
+      volumes:
+        - name: jmx-config
+          configMap:
+            name: aggregation-jmx-configmap
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-service.yaml
new file mode 100644
index 000000000..845ce7dd5
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-deployment.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-deployment.yaml
new file mode 100644
index 000000000..551f26ea4
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-deployment.yaml
@@ -0,0 +1,32 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-load-generator
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-load-generator
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-load-generator
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: workload-generator
+          image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+          ports:
+            - containerPort: 5701
+              name: coordination
+          env:
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-load-generator.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-service.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-service.yaml
new file mode 100644
index 000000000..f8b26b3f6
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-load-generator-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: titan-ccp-load-generator
+  labels:
+    app: titan-ccp-load-generator
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:
+    app: titan-ccp-load-generator
+  ports:
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
index 032499ea4..0fdcf7c41 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: jobmanager
-          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc4-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
index 7af13f20b..2ad71d666 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc4-flink:flink-rm-rebalance-latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-cp-kafka:9092"
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-deployment.yaml
new file mode 100644
index 000000000..2e95c7499
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-deployment.yaml
@@ -0,0 +1,66 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 2
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc4-hazelcastjet
+          imagePullPolicy: "Never"
+          ports:
+            - containerPort: 5555
+              name: jmx
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
+            - name: JAVA_OPTS
+              value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
+        #- name: prometheus-jmx-exporter
+        #  image: "solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143"
+        #  command:
+        #    - java
+        #    - -XX:+UnlockExperimentalVMOptions
+        #    - -XX:+UseCGroupMemoryLimitForHeap
+        #    - -XX:MaxRAMFraction=1
+        #    - -XshowSettings:vm
+        #    - -jar
+        #    - jmx_prometheus_httpserver.jar
+        #    - "5556"
+        #    - /etc/jmx-aggregation/jmx-kafka-prometheus.yml
+        #  ports:
+        #    - containerPort: 5556
+        #  volumeMounts:
+        #    - name: jmx-config
+        #      mountPath: /etc/jmx-aggregation
+      volumes:
+        - name: jmx-config
+          configMap:
+            name: aggregation-jmx-configmap
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-service.yaml
new file mode 100644
index 000000000..845ce7dd5
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-deployment.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-deployment.yaml
new file mode 100644
index 000000000..7a69d13da
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-deployment.yaml
@@ -0,0 +1,34 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-load-generator
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-load-generator
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-load-generator
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: workload-generator
+          image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+          ports:
+            - containerPort: 5701
+              name: coordination
+          env:
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-load-generator.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "my-confluent-cp-kafka:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://my-confluent-cp-schema-registry:8081"
+            - name: NUM_NESTED_GROUPS
+              value: "5"
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-service.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-service.yaml
new file mode 100644
index 000000000..f8b26b3f6
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-load-generator-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: titan-ccp-load-generator
+  labels:
+    app: titan-ccp-load-generator
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:
+    app: titan-ccp-load-generator
+  ports:
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/docker-test/uc1-hazelcastjet-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-hazelcastjet-docker-compose/docker-compose.yml
new file mode 100644
index 000000000..201b5ade8
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-hazelcastjet-docker-compose/docker-compose.yml
@@ -0,0 +1,73 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "9092"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    ports:
+      - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    ports:
+      - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: uc1-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+    ports:
+      - 5701:5701
+  benchmark2: 
+    image: uc1-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  benchmark3: 
+    image: uc1-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-hazelcastjet-docker-compose/docker-compose.yml
new file mode 100644
index 000000000..a2d2494f7
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet-docker-compose/docker-compose.yml
@@ -0,0 +1,66 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    ports:
+      - "2181:2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    ports:
+      - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    ports:
+      - "8081:8081"
+    expose:
+      - "8081"
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: uc2-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      DOWNSAMPLE_INTERVAL: 5000
+    ports:
+      - 5701:5701
+  benchmark2:
+    image: uc2-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      DOWNSAMPLE_INTERVAL: 5000
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-hazelcastjet-docker-compose/docker-compose.yml
new file mode 100644
index 000000000..75d6a7195
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet-docker-compose/docker-compose.yml
@@ -0,0 +1,68 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    ports:
+      - "2181:2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    ports:
+      - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    ports:
+      - "8081:8081"
+    expose:
+      - "8081"
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: uc3-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE_IN_SECONDS: 50
+      HOPPING_SIZE_IN_SECONDS: 5
+    ports:
+      - 5701:5701
+  benchmark2:
+    image: uc3-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE_IN_SECONDS: 50
+      HOPPING_SIZE_IN_SECONDS: 5
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-hazelcastjet-docker-compose/docker-compose.yml
new file mode 100644
index 000000000..d747ba04c
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet-docker-compose/docker-compose.yml
@@ -0,0 +1,57 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    ports:
+      - "2181:2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    ports:
+      - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    ports:
+      - "8081:8081"
+    expose:
+      - "8081"
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: uc4-hazelcastjet
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE: 5000
+    ports:
+      - 5701:5701
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 4
+      NUM_NESTED_GROUPS: 4
diff --git a/theodolite-benchmarks/hazelcastjet-commons/build.gradle b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
new file mode 100644
index 000000000..4cf96af5f
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
@@ -0,0 +1,24 @@
+plugins {
+    id 'theodolite.java-commons'
+}
+
+repositories {
+  jcenter()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+      url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+	implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+    implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+    implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
+    implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
+    implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
+    implementation 'io.confluent:kafka-avro-serializer:5.3.0'
+ 	implementation 'org.slf4j:slf4j-api:1.7.25'
+ 	 
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/BenchmarkConfigBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/BenchmarkConfigBuilder.java
new file mode 100644
index 000000000..c271e6013
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/BenchmarkConfigBuilder.java
@@ -0,0 +1,64 @@
+package theodolite.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.JoinConfig;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+
+public class BenchmarkConfigBuilder {
+
+  public Config buildFromEnv(final Logger logger, final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+
+    final String bootstrapServer = System.getenv(ConfigurationKeys.BOOTSTRAP_SERVER);
+    final String kubernetesDnsName = System.getenv(ConfigurationKeys.KUBERNETES_DNS_NAME);
+
+    ClusterConfig clusterConfig;
+    if (bootstrapServer != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServer);
+      logger.info("Use bootstrap server '{}'.", bootstrapServer);
+    } else if (kubernetesDnsName != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromKubernetesDnsName(kubernetesDnsName);
+      logger.info("Use Kubernetes DNS name '{}'.", kubernetesDnsName);
+    } else {
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServerDefault);
+      logger.info(
+          "Neither a bootstrap server nor a Kubernetes DNS name was provided. Use default bootstrap server '{}'.", // NOCS
+          bootstrapServerDefault);
+    }
+
+    final String port = System.getenv(ConfigurationKeys.PORT);
+    if (port != null) {
+      clusterConfig.setPort(Integer.parseInt(port));
+    }
+
+    final String portAutoIncrement = System.getenv(ConfigurationKeys.PORT_AUTO_INCREMENT);
+    if (portAutoIncrement != null) {
+      clusterConfig.setPortAutoIncrement(Boolean.parseBoolean(portAutoIncrement));
+    }
+
+    final String clusterNamePrefix = System.getenv(ConfigurationKeys.CLUSTER_NAME_PREFIX);
+    if (clusterNamePrefix != null) {
+      clusterConfig.setClusterNamePrefix(clusterNamePrefix);
+    }
+
+    // Set network config for this hazelcast jet instance
+    final Config config = new Config()
+        .setClusterName(clusterConfig.getClusterNamePrefix());
+    final JoinConfig joinConfig = config.getNetworkConfig()
+        .setPort(clusterConfig.getPort())
+        .setPortAutoIncrement(clusterConfig.isPortAutoIncrement())
+        .getJoin();
+    joinConfig.getMulticastConfig().setEnabled(false);
+    if (clusterConfig.hasBootstrapServer()) {
+      joinConfig.getTcpIpConfig().addMember(clusterConfig.getBootstrapServer());
+    } else if (clusterConfig.hasKubernetesDnsName()) {
+      joinConfig.getKubernetesConfig()
+          .setEnabled(true)
+          .setProperty(hzKubernetesServiceDnsKey, clusterConfig.getKubernetesDnsName());
+    }
+
+    return config;
+  }
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ClusterConfig.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ClusterConfig.java
new file mode 100644
index 000000000..53af54d93
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ClusterConfig.java
@@ -0,0 +1,76 @@
+package theodolite.commons.hazelcastjet;
+
+/**
+ * Configuration of a load generator cluster.
+ */
+public final class ClusterConfig {
+
+  private static final int PORT_DEFAULT = 5701;
+  private static final String CLUSTER_NAME_PREFIX_DEFAULT = "theodolite-hazelcastjet";
+
+  private final String bootstrapServer;
+  private final String kubernetesDnsName;
+  private int port = PORT_DEFAULT;
+  private boolean portAutoIncrement = true;
+  private String clusterNamePrefix = CLUSTER_NAME_PREFIX_DEFAULT;
+
+  /**
+   * Create a new {@link ClusterConfig} with the given parameter values.
+   */
+  private ClusterConfig(final String bootstrapServer, final String kubernetesDnsName) {
+    this.bootstrapServer = bootstrapServer;
+    this.kubernetesDnsName = kubernetesDnsName;
+  }
+
+  public boolean hasBootstrapServer() {
+    return this.bootstrapServer != null;
+  }
+
+  public String getBootstrapServer() {
+    return this.bootstrapServer;
+  }
+
+  public boolean hasKubernetesDnsName() {
+    return this.kubernetesDnsName != null;
+  }
+
+  public String getKubernetesDnsName() {
+    return this.kubernetesDnsName;
+  }
+
+  public int getPort() {
+    return this.port;
+  }
+
+  public boolean isPortAutoIncrement() {
+    return this.portAutoIncrement;
+  }
+
+  public ClusterConfig setPortAutoIncrement(final boolean portAutoIncrement) { // NOPMD
+    this.portAutoIncrement = portAutoIncrement;
+    return this;
+  }
+
+  public ClusterConfig setPort(final int port) { // NOPMD
+    this.port = port;
+    return this;
+  }
+
+  public String getClusterNamePrefix() {
+    return this.clusterNamePrefix;
+  }
+
+  public ClusterConfig setClusterNamePrefix(final String clusterNamePrefix) { // NOPMD
+    this.clusterNamePrefix = clusterNamePrefix;
+    return this;
+  }
+
+  public static ClusterConfig fromBootstrapServer(final String bootstrapServer) {
+    return new ClusterConfig(bootstrapServer, null);
+  }
+
+  public static ClusterConfig fromKubernetesDnsName(final String kubernetesDnsName) {
+    return new ClusterConfig(null, kubernetesDnsName);
+  }
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ConfigurationKeys.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ConfigurationKeys.java
new file mode 100644
index 000000000..85a1d355d
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/ConfigurationKeys.java
@@ -0,0 +1,30 @@
+package theodolite.commons.hazelcastjet;
+
+public class ConfigurationKeys {
+
+  // Common Keys
+  public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
+  public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
+  public static final String PORT = "PORT";
+  public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
+  public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
+  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
+  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";
+
+  // Additional topics
+  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";
+
+  // UC2
+  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";
+
+  // UC3
+  public static final String WINDOW_SIZE_IN_SECONDS = "WINDOW_SIZE_IN_SECONDS";
+  public static final String HOPPING_SIZE_IN_SECONDS = "HOPPING_SIZE_IN_SECONDS";
+
+  // UC4
+  public static final String KAFKA_CONFIGURATION_TOPIC = "KAFKA_CONFIGURATION_TOPIC";
+  public static final String KAFKA_FEEDBACK_TOPIC = "KAFKA_FEEDBACK_TOPIC";
+  public static final String WINDOW_SIZE_UC4 = "WINDOW_SIZE";
+  
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/JetInstanceBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/JetInstanceBuilder.java
new file mode 100644
index 000000000..37c11bf4c
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/theodolite/commons/hazelcastjet/JetInstanceBuilder.java
@@ -0,0 +1,60 @@
+package theodolite.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.BenchmarkConfigBuilder;
+
+public class JetInstanceBuilder {
+
+  private Config config;
+
+  /**
+   * Set Hazelcast Config for the JetInstance to be built.
+   *
+   * @param hazelcastConfig Config for this JetInstance to be built.
+   * @return This JetInstanceBuilder with the given Config set.
+   */
+  public JetInstanceBuilder setCustomConfig(final Config hazelcastConfig) {
+    this.config = hazelcastConfig;
+    return this;
+  }
+
+  /**
+   * Sets the ClusterConfig for this builder using the clusterConfigBuilder and environment
+   * variables.
+   *
+   * @param logger A specified logger to log procedures
+   * @param BootstrapServerDefault The default bootstrap server used in case no definition by the
+   *        environment is provided.
+   * @return This JetInstanceBuilder with a Config derived from the environment.
+   */
+  public JetInstanceBuilder setConfigFromEnv(final Logger logger,
+      final String BootstrapServerDefault, final String hzKubernetesServiceDnsKey) {
+    // Use ClusterConfigBuilder to build a cluster config for this microservice
+    final BenchmarkConfigBuilder configBuilder = new BenchmarkConfigBuilder();
+    final Config config =
+        configBuilder.buildFromEnv(logger, BootstrapServerDefault, hzKubernetesServiceDnsKey);
+    this.config = config;
+    return this;
+  }
+
+  /**
+   * Builds and returns a JetInstance. If a config is set, the JetInstance will contain the set
+   * config.
+   *
+   * @return A new JetInstance; if a config was set on this builder, it is applied.
+   */
+  public JetInstance build() {
+    final JetInstance jet = Jet.newJetInstance();
+    if (this.config != null) {
+      jet.getConfig().setHazelcastConfig(this.config);
+      return jet;
+    } else {
+      return jet;
+    }
+
+  }
+
+}
diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle
index 5602e816b..b2b843136 100644
--- a/theodolite-benchmarks/settings.gradle
+++ b/theodolite-benchmarks/settings.gradle
@@ -3,19 +3,24 @@ rootProject.name = 'theodolite-benchmarks'
 include 'load-generator-commons'
 include 'kstreams-commons'
 include 'flink-commons'
+include 'hazelcastjet-commons'
 
 include 'uc1-load-generator'
 include 'uc1-kstreams'
 include 'uc1-flink'
+include 'uc1-hazelcastjet'
 
 include 'uc2-load-generator'
 include 'uc2-kstreams'
 include 'uc2-flink'
+include 'uc2-hazelcastjet'
 
 include 'uc3-load-generator'
 include 'uc3-kstreams'
 include 'uc3-flink'
+include 'uc3-hazelcastjet'
 
 include 'uc4-load-generator'
 include 'uc4-kstreams'
 include 'uc4-flink'
+include 'uc4-hazelcastjet'
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 000000000..fa98ca63d
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,127 @@
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.correct_indentation=true
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.organize_imports=true
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=15
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=;
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.correct_indentation=true
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.organize_imports=true
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 000000000..87860c815
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=true
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 000000000..efbcb8c9e
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=true
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
new file mode 100644
index 000000000..7a0fcf7c5
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc1-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc1-hazelcastjet/bin/uc1-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/build.gradle b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
new file mode 100644
index 000000000..df120a50d
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
@@ -0,0 +1,6 @@
+plugins {
+  // hazelcastjet common stuff
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "theodolite.uc1.application.HistoryService"
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/HistoryService.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/HistoryService.java
new file mode 100644
index 000000000..4058d405b
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/HistoryService.java
@@ -0,0 +1,65 @@
+package theodolite.uc1.application;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  // Default Values of data used in UC1
+  // -- (default) kafkaPropeties data
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_BOOTSTRAP_SERVERS_DEFAULT = "localhost:19092"; // NOPMD
+  // -- (default) JetInstance data
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  // -- (default) Kafka input topic data
+  private static final String KAFKA_TOPIC_DEFAULT = "input";
+  // -- (default) job name for this history service
+  private static final String JOB_NAME = "uc1-hazelcastjet";
+
+
+  /**
+   * Entrypoint for UC1 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc1HistoryService = new HistoryService();
+    try {
+      uc1HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      e.printStackTrace(); // NOPMD
+      System.out.println("An Exception occured. "// NOPMD
+          + "No history service is deployed! ABORT MISSION!");
+    }
+  }
+
+  /**
+   * Start a UC1 service.
+   *
+   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC1 using the Uc1HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc1HazelcastJetFactory()
+        .setPropertiesFromEnv(KAFKA_BOOTSTRAP_SERVERS_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_TOPIC_DEFAULT)
+        .buildUc1Pipeline()
+        .buildUc1JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc1Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1HazelcastJetFactory.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1HazelcastJetFactory.java
new file mode 100644
index 000000000..091faa371
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1HazelcastJetFactory.java
@@ -0,0 +1,172 @@
+package theodolite.uc1.application;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import java.util.Objects;
+import java.util.Properties;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+import theodolite.commons.hazelcastjet.JetInstanceBuilder;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC1
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Properties and set the input topic which can be done using internal functions of this
+ * factory. Outside data only refers to custom values or default values in case data of the
+ * environment cannot be fetched.
+ */
+public class Uc1HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaPropertiesForPipeline;
+  private String kafkaInputTopic;
+  private JetInstance uc1JetInstance;
+  private Pipeline uc1JetPipeline;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be started.
+   */
+  public void runUc1Job(final String jobName) throws Exception { // NOPMD
+    if (this.uc1JetInstance != null) {
+      if (this.uc1JetPipeline != null) {
+
+        // Adds the job name and joins a job to the JetInstance defined in this factory
+        final JobConfig jobConfig = new JobConfig();
+        jobConfig.setName(jobName);
+        this.uc1JetInstance.newJobIfAbsent(this.uc1JetPipeline, jobConfig).join();
+
+      } else {
+        throw new Exception(// NOPMD
+            "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC1.");
+      }
+    } else {
+      throw new Exception("Jet Instance is not set! " // NOPMD
+          + "Cannot start a hazelcast jet job for UC1.");
+    }
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc1HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc1HazelcastJetFactory buildUc1JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc1JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc1HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc1HazelcastJetFactory buildUc1Pipeline() throws Exception { // NOPMD
+    // Check for set properties and set input topic
+    if (this.kafkaPropertiesForPipeline != null) {
+      if (this.kafkaInputTopic != null) {
+
+        // Build Pipeline Using the pipelineBuilder
+        final Uc1PipelineBuilder pipeBuilder = new Uc1PipelineBuilder();
+        this.uc1JetPipeline =
+            pipeBuilder.build(this.kafkaPropertiesForPipeline, this.kafkaInputTopic);
+        // Return Uc1HazelcastJetBuilder factory
+        return this;
+
+      } else {
+        throw new Exception("Kafka input topic for pipeline not set! " // NOPMD
+            + "Cannot build pipeline.");
+      }
+    } else {
+      throw new Exception("Kafka Properties for pipeline not set! Cannot build pipeline."); // NOPMD
+    }
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka properties for pipeline used in this builder.
+   *
+   * @param kafkaProperties A properties object containing necessary values used for the Hazelcast Jet
+   *        kafka connection.
+   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setCustomProperties(final Properties kafkaProperties) { // NOPMD
+    this.kafkaPropertiesForPipeline = kafkaProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setPropertiesFromEnv(final String bootstrapServersDefault, // NOPMD
+      final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final Uc1KafkaPropertiesBuilder propsBuilder = new Uc1KafkaPropertiesBuilder();
+    final Properties kafkaProps =
+        propsBuilder.buildKafkaPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault);
+    this.kafkaPropertiesForPipeline = kafkaProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setCustomKafkaInputTopic(final String inputTopic) { // NOPMD
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setKafkaInputTopicFromEnv(final String defaultInputTopic) { // NOPMD
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1KafkaPropertiesBuilder.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1KafkaPropertiesBuilder.java
new file mode 100644
index 000000000..5df508cc7
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1KafkaPropertiesBuilder.java
@@ -0,0 +1,43 @@
+package theodolite.uc1.application;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+
+/**
+ * Builds a Properties object containing the needed kafka properties used for the UC1 benchmark of
+ * Hazelcast Jet.
+ */
+public class Uc1KafkaPropertiesBuilder {
+
+  /**
+   * Builds Kafka Properties used for the UC1 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC1
+   *         Pipeline.
+   */
+  public Properties buildKafkaPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers);
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", KafkaAvroDeserializer.class);
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.setProperty("auto.offset.reset", "earliest");
+    return props;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1PipelineBuilder.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1PipelineBuilder.java
new file mode 100644
index 000000000..8090899f8
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/theodolite/uc1/application/Uc1PipelineBuilder.java
@@ -0,0 +1,61 @@
+package theodolite.uc1.application;
+
+import com.google.gson.Gson;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sink;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import java.util.Map.Entry;
+import java.util.Properties;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC1 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc1PipelineBuilder {
+
+  private static final Gson GSON = new Gson();
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param inputSource A hazelcast jet stream-source for Entry<String,ActivePowerRecord> input
+   *        values.
+   * @return A hazelcast jet pipeline which processes data for Uc1.
+   */
+  public Pipeline build(final StreamSource<Entry<String, ActivePowerRecord>> inputSource,
+      final Sink<String> outputSink) {
+    // Build Pipeline
+    final Pipeline pipe = Pipeline.create();
+    pipe.readFrom(inputSource)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .setName("Log content")
+        .map(record -> {
+          return GSON.toJson(record);
+        })
+        .writeTo(outputSink);
+
+    return pipe;
+  }
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaPropertiesForPipeline Properties Object containing the necessary kafka attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @return A hazelcast jet pipeline which processes data for Uc1.
+   */
+  public Pipeline build(final Properties kafkaPropertiesForPipeline, final String kafkaInputTopic) {
+
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaInputSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(kafkaPropertiesForPipeline, kafkaInputTopic);
+
+    final Sink<String> loggerSink = Sinks.logger();
+    return this.build(kafkaInputSource, loggerSink);
+
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 000000000..e3371cc87
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc1-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/theodolite/uc1/application/Uc1PipelineTest.java b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/theodolite/uc1/application/Uc1PipelineTest.java
new file mode 100644
index 000000000..aadeb10bd
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/theodolite/uc1/application/Uc1PipelineTest.java
@@ -0,0 +1,94 @@
+package theodolite.uc1.application;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Test;
+import titan.ccp.model.records.ActivePowerRecord;
+
+@Category(SerialTest.class)
+public class Uc1PipelineTest extends JetTestSupport {
+
+  private static final Gson GSON = new Gson();
+  final Type entryStringRecordType = new TypeToken<Entry<String, ActivePowerRecord>>() {}.getType();
+
+  @Test
+  public void test1Uc1PipelineElements() {
+
+    // Test Configuration
+    final int TEST_ITEMS_PER_SECOND = 1;
+    final String TEST_SENSOR_NAME = "id_test1";
+    final Double TEST_VALUE_IN_W = 10.0;
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    final JetInstance testInstance = this.createJetMember(testJetConfig);
+
+    // Test Pipeline definition
+    final List<String> sourceRecord =
+        new ArrayList<>();
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(TEST_ITEMS_PER_SECOND, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(TEST_SENSOR_NAME, timestamp, TEST_VALUE_IN_W);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(TEST_SENSOR_NAME, testRecord);
+          sourceRecord.add(GSON.toJson(testEntry));
+          return testEntry;
+        });
+
+    // Recreation of the UC1 Pipeline - adjusted for a Hazelcast Jet Pipeline Test
+    final Pipeline testPipeline = Pipeline.create();
+    testPipeline
+        .readFrom(testSource)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .setName("Log content")
+        .map(data -> {
+          return new Gson().toJson(data);
+        })
+        .apply(Assertions.assertCollectedEventually(6,
+            collection -> Assert.assertTrue("Not enough data arrived in the end",
+                collection.size() >= 5)));
+
+    // Test the UC1 Pipeline Recreation
+    try {
+      testInstance.newJob(testPipeline).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, " +
+          "but completed normally");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
new file mode 100644
index 000000000..4a7680e29
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc2-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc2-hazelcastjet/bin/uc2-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/build.gradle b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
new file mode 100644
index 000000000..0bb75eaf1
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
@@ -0,0 +1,6 @@
+plugins {
+  // common java conventions
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "theodolite.uc2.application.HistoryService"
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/HistoryService.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/HistoryService.java
new file mode 100644
index 000000000..7fc9c5913
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/HistoryService.java
@@ -0,0 +1,67 @@
+package theodolite.uc2.application;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+  // General Information (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  private static final String KAFKA_BSERVER_DEFAULT = "localhost:19092";
+  // UC2 specific (default)
+  private static final String DOWNSAMPLE_INTERVAL_DEFAULT = "5000";
+  // -- (default) job name for this history serivce
+  private static final String JOB_NAME = "uc2-hazelcastjet";
+
+  /**
+   * Entrypoint for UC2 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc2HistoryService = new HistoryService();
+    try {
+      uc2HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      e.printStackTrace(); // NOPMD
+      System.out.println("An Exception occured. "// NOPMD
+          + "No history service is deployed! ABORT MISSION!");
+    }
+  }
+
+  /**
+   * Start a UC2 service.
+   *
+   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC2 using the Uc1HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc2HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BSERVER_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setWritePropertiesFromEnv(KAFKA_BSERVER_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setDownsampleIntervalFromEnv(DOWNSAMPLE_INTERVAL_DEFAULT)
+        .buildUc2Pipeline()
+        .buildUc2JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc2Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2HazelcastJetFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2HazelcastJetFactory.java
new file mode 100644
index 000000000..9d96d59a3
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2HazelcastJetFactory.java
@@ -0,0 +1,313 @@
+package theodolite.uc2.application;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import java.util.Objects;
+import java.util.Properties;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+import theodolite.commons.hazelcastjet.JetInstanceBuilder;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC2
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the downsample interval
+ * which can be done using internal functions of this factory. Outside data only refers to custom
+ * values or default values in case data of the environment cannot the fetched.
+ */
+public class Uc2HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc2JetInstance;
+  private Pipeline uc2JetPipeline;
+  // UC2 specific
+  private int downsampleInterval;
+
+  // Checkflags
+  private boolean readPropertiesSet;
+  private boolean writePropertiesSet;
+  private boolean inputTopicSet;
+  private boolean outputTopicSet;
+  private boolean pipelineSet;
+  private boolean jetInstanceSet;
+  private boolean downsampleIntervalSet;
+
+  /**
+   * Create a new Hazelcast Jet Factory for UC2.
+   */
+  public Uc2HazelcastJetFactory() {
+    this.readPropertiesSet = false;
+    this.writePropertiesSet = false;
+    this.inputTopicSet = false;
+    this.outputTopicSet = false;
+    this.pipelineSet = false;
+    this.jetInstanceSet = false;
+    this.downsampleIntervalSet = false;
+  }
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factors. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be startet.
+   */
+  public void runUc2Job(final String jobName) throws Exception { // NOPMD
+    if (this.jetInstanceSet) {
+      if (this.pipelineSet) {
+
+        // Adds the job name and joins a job to the JetInstance defined in this factory
+        final JobConfig jobConfig = new JobConfig();
+        jobConfig.setName(jobName);
+        this.uc2JetInstance.newJobIfAbsent(this.uc2JetPipeline, jobConfig).join();
+
+      } else {
+        throw new Exception(// NOPMD
+            "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC2.");
+      }
+    } else {
+      throw new Exception("Jet Instance is not set! " // NOPMD
+          + "Cannot start a hazelcast jet job for UC2.");
+    }
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc2HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc2HazelcastJetFactory buildUc2JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc2JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    this.jetInstanceSet = true;
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc2HazelcastJetFactory containg a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc2HazelcastJetFactory buildUc2Pipeline() throws Exception { // NOPMD
+    // Check for set properties and set input topic
+    if (this.readPropertiesSet) {
+      if (this.writePropertiesSet) {
+        if (this.inputTopicSet) {
+          if (this.outputTopicSet) {
+            if (this.downsampleIntervalSet) {
+              // Build Pipeline Using the pipelineBuilder
+              final Uc2PipelineBuilder pipeBuilder = new Uc2PipelineBuilder();
+              this.uc2JetPipeline =
+                  pipeBuilder.build(this.kafkaReadPropsForPipeline, this.kafkaWritePropsForPipeline,
+                      this.kafkaInputTopic, this.kafkaOutputTopic, this.downsampleInterval);
+              this.pipelineSet = true;
+              // Return Uc2HazelcastJetBuilder factory
+              return this;
+            } else {
+              throw new Exception("downsample interval for pipeline not set! " // NOPMD
+                  + "Cannot build pipeline."); // NOCS // NOPMD
+            }
+          } else {
+            throw new Exception("kafka output topic for pipeline not set! " // NOPMD
+                + "Cannot build pipeline."); // NOCS // NOPMD
+          }
+        } else {
+          throw new Exception("Kafka input topic for pipeline not set! " // NOPMD
+              + "Cannot build pipeline."); // NOCS // NOPMD
+        }
+      } else {
+        throw new Exception("Kafka Write Properties for pipeline not set! " // NOPMD
+            + "Cannot build pipeline."); // NOCS // NOPMD
+      }
+    } else {
+      throw new Exception("Kafka Read Properties for pipeline not set! " // NOPMD
+          + "Cannot build pipeline."); // NOCS // NOPMD
+    }
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A propeties object containing necessary values used for the hazelcst
+   *        jet kafka connection to read data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    this.readPropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A propeties object containing necessary values used for the
+   *        hazelcst jet kafka connection to write data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    this.writePropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc2HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault,
+      final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final Uc2KafkaPropertiesBuilder propsBuilder = new Uc2KafkaPropertiesBuilder();
+    final Properties kafkaReadProps =
+        propsBuilder.buildKafkaReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault);
+    this.kafkaReadPropsForPipeline = kafkaReadProps;
+    this.readPropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc2HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final Uc2KafkaPropertiesBuilder propsBuilder = new Uc2KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault);
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    this.writePropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input output for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    this.outputTopicSet = true;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    this.outputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder.
+   * 
+   * @param downsampleInterval the downsample interval to be used for this pipeline.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setCustomDownsampleInterval(// NOPMD
+      final int downsampleInterval) {
+    this.downsampleInterval = downsampleInterval;
+    this.downsampleIntervalSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder from the environment.
+   * 
+   * @param defaultDownsampleInterval the default downsample interval to be used for this pipeline
+   *        when none is set in the environment.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setDownsampleIntervalFromEnv(// NOPMD
+      final String defaultDownsampleInterval) {
+    final String downsampleInterval = (String) Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.DOWNSAMPLE_INTERVAL),
+        defaultDownsampleInterval);
+    final int downsampleIntervalNumber = Integer.parseInt(downsampleInterval);
+    this.downsampleInterval = downsampleIntervalNumber;
+    this.downsampleIntervalSet = true;
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2KafkaPropertiesBuilder.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2KafkaPropertiesBuilder.java
new file mode 100644
index 000000000..beee6e1f3
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2KafkaPropertiesBuilder.java
@@ -0,0 +1,64 @@
+package theodolite.uc2.application;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+
+/**
+ * Builds a read and write Properties objects containing the needed kafka properties used for the
+ * UC2 benchmark of Hazelcast Jet.
+ */
+public class Uc2KafkaPropertiesBuilder {
+
+  /**
+   * Builds Kafka Properties used for the UC2 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by envrionment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC2
+   *         Pipeline.
+   */
+  public Properties buildKafkaReadPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", KafkaAvroDeserializer.class);
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.setProperty("auto.offset.reset", "latest");
+    return props;
+  }
+
+  /**
+   * Builds Kafka Properties used for the UC2 Benchmark pipeline.
+   * 
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC2
+   *         Pipeline.
+   */
+  public Properties buildKafkaWritePropsFromEnv(final String kafkaBootstrapServerDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.serializer", StringSerializer.class.getCanonicalName());
+    props.put("value.serializer", StringSerializer.class.getCanonicalName());
+    return props;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2PipelineBuilder.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2PipelineBuilder.java
new file mode 100644
index 000000000..a1c867bf3
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/application/Uc2PipelineBuilder.java
@@ -0,0 +1,63 @@
+package theodolite.uc2.application;
+
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.util.Map;
+import java.util.Properties;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC2 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc2PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param downsampleInterval The window length of the tumbling window used in the aggregation of
+   *        this pipeline.
+   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
+   *         for UC2.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int downsampleInterval) {
+
+    final Pipeline pipe = Pipeline.create();
+    final StreamStage<Map.Entry<String, String>> mapProduct =
+        pipe.readFrom(KafkaSources.<String, ActivePowerRecord>kafka(
+            kafkaReadPropsForPipeline, kafkaInputTopic))
+            .withNativeTimestamps(0)
+            .setLocalParallelism(1)
+            .groupingKey(record -> record.getValue().getIdentifier())
+            .window(WindowDefinition.tumbling(downsampleInterval))
+            .aggregate(
+                AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+            .map(agg -> {
+              String theValue = agg.getValue().toString();
+              String theKey = agg.getKey().toString();
+              return Map.entry(theKey, theValue);
+            });
+    // Add Sink1: Logger
+    mapProduct.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    mapProduct.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ClusterConfig.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ClusterConfig.java
new file mode 100644
index 000000000..1df097a09
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ClusterConfig.java
@@ -0,0 +1,76 @@
+package theodolite.uc2.applicationold;
+
/**
 * Configuration of a load generator cluster. Instances are created via the static factory
 * methods, which pick exactly one of two discovery mechanisms: a fixed bootstrap server or a
 * Kubernetes DNS name.
 */
public final class ClusterConfig {

  private static final int PORT_DEFAULT = 5701;
  private static final String CLUSTER_NAME_PREFIX_DEFAULT = "theodolite-load-generation";

  private final String bootstrapServer;
  private final String kubernetesDnsName;
  private int port = PORT_DEFAULT;
  private boolean portAutoIncrement = true;
  private String clusterNamePrefix = CLUSTER_NAME_PREFIX_DEFAULT;

  /**
   * Create a new {@link ClusterConfig}; exactly one of the two arguments is expected to be
   * non-null, depending on the factory method used.
   */
  private ClusterConfig(final String bootstrapServer, final String kubernetesDnsName) {
    this.bootstrapServer = bootstrapServer;
    this.kubernetesDnsName = kubernetesDnsName;
  }

  /** Creates a {@link ClusterConfig} that joins its cluster via a bootstrap server address. */
  public static ClusterConfig fromBootstrapServer(final String bootstrapServer) {
    return new ClusterConfig(bootstrapServer, null);
  }

  /** Creates a {@link ClusterConfig} that joins its cluster via a Kubernetes DNS name. */
  public static ClusterConfig fromKubernetesDnsName(final String kubernetesDnsName) {
    return new ClusterConfig(null, kubernetesDnsName);
  }

  /** Returns {@code true} if this config carries a bootstrap server address. */
  public boolean hasBootstrapServer() {
    return this.bootstrapServer != null;
  }

  /** Returns the bootstrap server address or {@code null} if none was configured. */
  public String getBootstrapServer() {
    return this.bootstrapServer;
  }

  /** Returns {@code true} if this config carries a Kubernetes DNS name. */
  public boolean hasKubernetesDnsName() {
    return this.kubernetesDnsName != null;
  }

  /** Returns the Kubernetes DNS name or {@code null} if none was configured. */
  public String getKubernetesDnsName() {
    return this.kubernetesDnsName;
  }

  /** Returns the configured port (default {@code 5701}). */
  public int getPort() {
    return this.port;
  }

  /** Sets the port; returns {@code this} for chaining. */
  public ClusterConfig setPort(final int port) { // NOPMD
    this.port = port;
    return this;
  }

  /** Returns whether port auto-increment is enabled (default {@code true}). */
  public boolean isPortAutoIncrement() {
    return this.portAutoIncrement;
  }

  /** Enables or disables port auto-increment; returns {@code this} for chaining. */
  public ClusterConfig setPortAutoIncrement(final boolean portAutoIncrement) { // NOPMD
    this.portAutoIncrement = portAutoIncrement;
    return this;
  }

  /** Returns the cluster name prefix (default {@code "theodolite-load-generation"}). */
  public String getClusterNamePrefix() {
    return this.clusterNamePrefix;
  }

  /** Sets the cluster name prefix; returns {@code this} for chaining. */
  public ClusterConfig setClusterNamePrefix(final String clusterNamePrefix) { // NOPMD
    this.clusterNamePrefix = clusterNamePrefix;
    return this;
  }

}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ConfigurationKeys.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ConfigurationKeys.java
new file mode 100644
index 000000000..812922016
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/ConfigurationKeys.java
@@ -0,0 +1,44 @@
+package theodolite.uc2.applicationold;
+
/**
 * Keys to access configuration parameters. Each constant holds the name of an environment
 * variable / configuration entry; the class itself is a non-instantiable constant holder.
 */
public final class ConfigurationKeys {

  // Cluster-related keys (names suggest Hazelcast cluster formation — confirm against readers).
  public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
  public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
  public static final String PORT = "PORT";
  public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
  public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";

  // Workload-related keys.
  public static final String NUM_SENSORS = "NUM_SENSORS";
  public static final String PERIOD_MS = "PERIOD_MS";
  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";
  public static final String VALUE = "VALUE";
  public static final String THREADS = "THREADS";

  // Kafka-related keys.
  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";
  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";
  public static final String KAFKA_BATCH_SIZE = "KAFKA_BATCH_SIZE";
  public static final String KAFKA_LINGER_MS = "KAFKA_LINGER_MS";
  public static final String KAFKA_BUFFER_MEMORY = "KAFKA_BUFFER_MEMORY";

  // Utility class: prevent instantiation.
  private ConfigurationKeys() {}

}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/HistoryService.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/HistoryService.java
new file mode 100644
index 000000000..3f43ff191
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/theodolite/uc2/applicationold/HistoryService.java
@@ -0,0 +1,264 @@
+package theodolite.uc2.applicationold;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.JoinConfig;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+//import java.time.format.DateTimeFormatter;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements. For the UC2 benchmark it reads {@code ActivePowerRecord}s from Kafka,
+ * averages the power value per sensor over a tumbling window (the downsample interval)
+ * and writes the result back to Kafka.
+ */
+public class HistoryService {
+
+  // Extracted as a constant to satisfy Checkstyle's duplicate-literal rule.
+  private static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // General information
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  private static final String KAFKA_BSERVER_DEFAULT = "localhost:19092";
+  // UC2 specific
+  private static final String DOWNSAMPLE_INTERVAL_DEFAULT = "5000";
+
+  // Information per history service instance
+  private ClusterConfig clusterConfig;
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  // UC2 specific: tumbling window size in milliseconds
+  private int downsampleInterval;
+
+
+  /** 
+   * Entrypoint for UC2 using Gradle Run. 
+   */
+  public static void main(final String[] args) {
+    HistoryService.loadHistoryService().run();
+  }
+
+  /**
+   * Builds a history service object from environment variables, falling back to defaults for
+   * every value that is not provided.
+   *
+   * @return a fully configured {@link HistoryService} ready to {@link #run()}.
+   */
+  public static HistoryService loadHistoryService() {
+    final String bootstrapServer = System.getenv(ConfigurationKeys.BOOTSTRAP_SERVER);
+    final String kubernetesDnsName = System.getenv(ConfigurationKeys.KUBERNETES_DNS_NAME);
+
+    // A bootstrap server takes precedence over a Kubernetes DNS name.
+    ClusterConfig clusterConfig;
+    if (bootstrapServer != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServer);
+      LOGGER.info("Use bootstrap server '{}'.", bootstrapServer);
+    } else if (kubernetesDnsName != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromKubernetesDnsName(kubernetesDnsName);
+      LOGGER.info("Use Kubernetes DNS name '{}'", kubernetesDnsName);
+    } else {
+      clusterConfig = ClusterConfig.fromBootstrapServer(BOOTSTRAP_SERVER_DEFAULT);
+      LOGGER.info(// NOPMD
+          "Neither a bootstrap server nor a Kubernetes DNS name was provided. " 
+          + "Use default bootstrap server '{}'.",
+          BOOTSTRAP_SERVER_DEFAULT);
+    }
+
+    final String port = System.getenv(ConfigurationKeys.PORT);
+    if (port != null) {
+      clusterConfig.setPort(Integer.parseInt(port));
+    }
+
+    final String portAutoIncrement = System.getenv(ConfigurationKeys.PORT_AUTO_INCREMENT);
+    if (portAutoIncrement != null) {
+      clusterConfig.setPortAutoIncrement(Boolean.parseBoolean(portAutoIncrement));
+    }
+
+    final String clusterNamePrefix = System.getenv(ConfigurationKeys.CLUSTER_NAME_PREFIX);
+    if (clusterNamePrefix != null) {
+      clusterConfig.setClusterNamePrefix(clusterNamePrefix);
+    }
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        KAFKA_BSERVER_DEFAULT);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        SCHEMA_REGISTRY_URL_DEFAULT);
+    final Properties kafkaReadPropsForPipeline =
+        buildKafkaReadProps(kafkaBootstrapServers, schemaRegistryUrl);
+    final Properties kafkaWritePropsForPipeline =
+        buildKafkaWriteProps(kafkaBootstrapServers);
+
+    final String kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        KAFKA_INPUT_TOPIC_DEFAULT);
+
+    final String kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        KAFKA_OUTPUT_TOPIC_DEFAULT);
+
+    final String downsampleInterval = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.DOWNSAMPLE_INTERVAL),
+        DOWNSAMPLE_INTERVAL_DEFAULT);
+    final int downsampleIntervalNumber = Integer.parseInt(downsampleInterval);
+
+    return new HistoryService()
+        .setClusterConfig(clusterConfig)
+        .setKafkaReadPropertiesForPipeline(kafkaReadPropsForPipeline)
+        .setKafkaWritePropertiesForPipeline(kafkaWritePropsForPipeline)
+        .setKafkaInputTopic(kafkaInputTopic)
+        .setKafkaOutputTopic(kafkaOutputTopic)
+        .setDownsampleInterval(downsampleIntervalNumber);
+  }
+
+  /** Set the cluster config when creating the history service. */
+  private HistoryService setClusterConfig(final ClusterConfig clusterConfig) { // NOPMD
+    this.clusterConfig = clusterConfig;
+    return this;
+  }
+
+  /** Set the Kafka read properties used by the pipeline source. */
+  private HistoryService setKafkaReadPropertiesForPipeline(// NOPMD
+      final Properties kafkaReadPropertiesForPipeline) {
+    this.kafkaReadPropsForPipeline = kafkaReadPropertiesForPipeline;
+    return this;
+  }
+
+  /** Set the Kafka write properties used by the pipeline sink. */
+  private HistoryService setKafkaWritePropertiesForPipeline(// NOPMD
+      final Properties kafkaWritePropsForPipeline) {
+    this.kafkaWritePropsForPipeline = kafkaWritePropsForPipeline;
+    return this;
+  }
+
+  /** Set the Kafka input topic used to build the pipeline. */
+  private HistoryService setKafkaInputTopic(final String kafkaInputTopic) { //NOPMD
+    this.kafkaInputTopic = kafkaInputTopic;
+    return this;
+  }
+
+  /** Set the Kafka output topic used to build the pipeline. */
+  private HistoryService setKafkaOutputTopic(final String kafkaOutputTopic) { //NOPMD
+    this.kafkaOutputTopic = kafkaOutputTopic;
+    return this;
+  }
+
+  /** Set the downsample interval (tumbling window size in ms) used by this history service. */
+  private HistoryService setDownsampleInterval(final int downsampleInterval) { //NOPMD
+    this.downsampleInterval = downsampleInterval;
+    return this;
+  }
+
+  /**
+   * Defines Kafka properties used to fetch data from Kafka using a Hazelcast Jet pipeline.
+   *
+   * @param kafkaBootstrapServer the Kafka bootstrap server(s) to connect to.
+   * @param schemaRegistryUrl the Confluent schema registry URL for Avro deserialization.
+   * @return properties used to fetch data from Kafka using a Hazelcast Jet pipeline.
+   */
+  private static Properties buildKafkaReadProps(final String kafkaBootstrapServer,
+      final String schemaRegistryUrl) {
+    final Properties props = new Properties();
+    props.put(BOOTSTRAP_SERVERS, kafkaBootstrapServer);
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    // Use the class-name string, consistent with the other (de)serializer entries.
+    props.put("value.deserializer", KafkaAvroDeserializer.class.getCanonicalName());
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.setProperty("auto.offset.reset", "latest");
+    return props;
+  }
+
+  /**
+   * Defines Kafka properties used to write data to Kafka using a Hazelcast Jet pipeline.
+   *
+   * @param kafkaBootstrapServer the Kafka bootstrap server(s) to connect to.
+   * @return properties used to write data to Kafka using a Hazelcast Jet pipeline.
+   */
+  private static Properties buildKafkaWriteProps(final String kafkaBootstrapServer) {
+    final Properties props = new Properties();
+    props.put(BOOTSTRAP_SERVERS, kafkaBootstrapServer);
+    props.put("key.serializer", StringSerializer.class.getCanonicalName());
+    props.put("value.serializer", StringSerializer.class.getCanonicalName());
+    return props;
+  }
+
+  /**
+   * Start the UC2 service.
+   */
+  public void run() {
+    Objects.requireNonNull(this.clusterConfig, "No cluster config set.");
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Build a pipeline and start a Hazelcast Jet Instance and add a job that uses the built pipeline.
+   */
+  private void createHazelcastJetApplication() {
+
+    // Build the pipeline: read -> window/average per sensor -> map to (key, value) strings.
+    final Pipeline pipeline = Pipeline.create();
+    final StreamStage<Map.Entry<String, String>> mapProduct =
+        pipeline.readFrom(KafkaSources.<String, ActivePowerRecord>kafka(
+                this.kafkaReadPropsForPipeline, this.kafkaInputTopic))
+            .withNativeTimestamps(0)
+            .setLocalParallelism(1)
+            .groupingKey(record -> record.getValue().getIdentifier())
+            .window(WindowDefinition.tumbling(this.downsampleInterval))
+            .aggregate(
+                AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+            .map(agg -> {
+              final String theValue = agg.getValue().toString();
+              final String theKey = agg.getKey().toString();
+              return Map.entry(theKey, theValue);
+            });
+    // Sink 1: log the aggregated results.
+    mapProduct.writeTo(Sinks.logger());
+    // Sink 2: write back to Kafka for the final benchmark evaluation.
+    mapProduct.writeTo(KafkaSinks.<String, String>kafka(
+        this.kafkaWritePropsForPipeline, this.kafkaOutputTopic));
+
+    // Hazelcast network configuration for this Jet instance.
+    final Config config = new Config().setClusterName(this.clusterConfig.getClusterNamePrefix());
+    final JoinConfig joinConfig = config.getNetworkConfig()
+        .setPort(this.clusterConfig.getPort())
+        .setPortAutoIncrement(this.clusterConfig.isPortAutoIncrement())
+        .getJoin();
+    // Join either via a bootstrap server member or via Kubernetes service discovery.
+    joinConfig.getMulticastConfig().setEnabled(false);
+    if (this.clusterConfig.hasBootstrapServer()) {
+      joinConfig.getTcpIpConfig().addMember(this.clusterConfig.getBootstrapServer());
+    } else if (this.clusterConfig.hasKubernetesDnsName()) {
+      joinConfig.getKubernetesConfig()
+          .setEnabled(true)
+          .setProperty(HZ_KUBERNETES_SERVICE_DNS_KEY, this.clusterConfig.getKubernetesDnsName());
+    }
+
+    // Create the Jet instance, apply the config, and submit the pipeline as a named job.
+    final JetInstance jet = Jet.newJetInstance();
+    jet.getConfig().setHazelcastConfig(config);
+    final JobConfig jobConfig = new JobConfig();
+    jobConfig.setName("uc2-hazelcastjet");
+    jet.newJobIfAbsent(pipeline, jobConfig).join();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 000000000..e3371cc87
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc2-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
new file mode 100644
index 000000000..6dc99aeb7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+# Unpack the Gradle distribution tar produced by the build into the image root.
+ADD build/distributions/uc3-hazelcastjet.tar /
+
+
+# Configure the SLF4J simple-logger level from LOG_LEVEL, then start the service.
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc3-hazelcastjet/bin/uc3-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/build.gradle b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
new file mode 100644
index 000000000..1f4870895
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
@@ -0,0 +1,6 @@
+plugins {
+  // Shared build conventions for Hazelcast Jet benchmark implementations.
+  id 'theodolite.hazelcastjet'
+}
+
+// Entry point used by the application plugin's run/dist tasks.
+mainClassName = "theodolite.uc3.application.HistoryService"
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/HistoryService.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/HistoryService.java
new file mode 100644
index 000000000..3da1eccd7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/HistoryService.java
@@ -0,0 +1,71 @@
+package theodolite.uc3.application;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements (UC3 benchmark on Hazelcast Jet).
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // General Information
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  private static final String KAFKA_BSERVERS_DEFAULT = "localhost:19092";
+  // UC3 specific
+  private static final String WINDOW_SIZE_IN_SECONDS_DEFAULT = "50";
+  private static final String HOPSIZE_IN_SEC_DEFAULT = "1";
+  // -- (default) job name for this history service
+  private static final String JOB_NAME = "uc3-hazelcastjet";
+
+
+  /**
+   * Entrypoint for UC3 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc3HistoryService = new HistoryService();
+    try {
+      uc3HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      // Log via SLF4J (with the stack trace) instead of stdout so the failure is
+      // visible in the service logs.
+      LOGGER.error("An Exception occurred. No history service is deployed! ABORT MISSION!", e);
+    }
+  }
+
+  /**
+   * Start a UC3 service.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC3 using the Uc3HazelcastJetFactory.
+   *
+   * <p>Every configuration value is read from the environment, falling back to the
+   * defaults defined above.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc3HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BSERVERS_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setWritePropertiesFromEnv(KAFKA_BSERVERS_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setWindowSizeInSecondsFromEnv(WINDOW_SIZE_IN_SECONDS_DEFAULT)
+        .setHoppingSizeInSecondsFromEnv(HOPSIZE_IN_SEC_DEFAULT)
+        .buildUc3Pipeline()
+        .buildUc3JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc3Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3HazelcastJetFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3HazelcastJetFactory.java
new file mode 100644
index 000000000..51612e622
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3HazelcastJetFactory.java
@@ -0,0 +1,358 @@
+package theodolite.uc3.application;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import java.util.Objects;
+import java.util.Properties;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+import theodolite.commons.hazelcastjet.JetInstanceBuilder;
+import theodolite.uc3.application.uc3specifics.HourOfDayKey;
+import theodolite.uc3.application.uc3specifics.HourOfDayKeySerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC3
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the window size in seconds
+ * and the hopping size in seconds. This can be done using internal functions of this factory.
+ * Outside data only refers to custom values or default values in case data of the environment
+ * cannot be fetched.
+ */
+public class Uc3HazelcastJetFactory { //NOPMD
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc3JetInstance;
+  private Pipeline uc3JetPipeline;
+  // UC3 specific
+  private int windowSizeInSeconds;
+  private int hoppingSizeInSeconds;
+
+  // Check flags: track which prerequisites have been supplied so that build/run
+  // methods can fail fast with a precise error message.
+  private boolean readPropertiesSet;
+  private boolean writePropertiesSet;
+  private boolean inputTopicSet;
+  private boolean outputTopicSet;
+  private boolean pipelineSet;
+  private boolean jetInstanceSet;
+  private boolean windowSizeInSecondsSet;
+  private boolean hoppingSizeInSecondsSet;
+
+
+  /**
+   * Create a new Hazelcast Jet Factory for UC3. All check flags start unset.
+   */
+  public Uc3HazelcastJetFactory() {
+    this.readPropertiesSet = false;
+    this.writePropertiesSet = false;
+    this.inputTopicSet = false;
+    this.outputTopicSet = false;
+    this.pipelineSet = false;
+    this.jetInstanceSet = false;
+    this.windowSizeInSecondsSet = false;
+    this.hoppingSizeInSecondsSet = false;
+  }
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be started.
+   */
+  public void runUc3Job(final String jobName) throws Exception { // NOPMD
+    // Guard clauses instead of nested conditionals: fail fast on missing prerequisites.
+    if (!this.jetInstanceSet) {
+      throw new Exception("Jet Instance is not set! " // NOPMD
+          + "Cannot start a hazelcast jet job for UC3.");
+    }
+    if (!this.pipelineSet) {
+      throw new Exception(// NOPMD
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC3.");
+    }
+
+    // Register the custom key serializer, name the job, and submit the pipeline.
+    final JobConfig jobConfig = new JobConfig()
+        .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class)
+        .setName(jobName);
+    this.uc3JetInstance.newJobIfAbsent(this.uc3JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc3HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc3HazelcastJetFactory buildUc3JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc3JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    this.jetInstanceSet = true;
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the read
+   * and write properties, the input and output topic, and the window and hopping sizes to be
+   * defined in this factory beforehand.
+   *
+   * @return A Uc3HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If any prerequisite is not defined, the pipeline cannot be built.
+   */
+  public Uc3HazelcastJetFactory buildUc3Pipeline() throws Exception { // NOPMD
+    // Validate all prerequisites up front instead of a six-level if/else pyramid.
+    requireSet(this.readPropertiesSet, "Kafka Read Properties for pipeline not set!");
+    requireSet(this.writePropertiesSet, "Kafka Write Properties for pipeline not set!");
+    requireSet(this.inputTopicSet, "Kafka input topic for pipeline not set!");
+    requireSet(this.outputTopicSet, "Kafka output topic for pipeline not set!");
+    requireSet(this.windowSizeInSecondsSet, "Window size in seconds for pipeline not set!");
+    requireSet(this.hoppingSizeInSecondsSet, "Hopping size in seconds for pipeline not set!");
+
+    // Build Pipeline using the pipelineBuilder
+    final Uc3PipelineBuilder pipeBuilder = new Uc3PipelineBuilder();
+    this.uc3JetPipeline =
+        pipeBuilder.build(this.kafkaReadPropsForPipeline,
+            this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic, this.hoppingSizeInSeconds,
+            this.windowSizeInSeconds);
+    this.pipelineSet = true;
+    // Return Uc3HazelcastJetBuilder factory
+    return this;
+  }
+
+  /**
+   * Throws if a required builder prerequisite has not been supplied.
+   *
+   * @param isSet the check flag for the prerequisite.
+   * @param message description of the missing prerequisite.
+   * @throws Exception if {@code isSet} is {@code false}.
+   */
+  private static void requireSet(final boolean isSet, final String message) throws Exception { // NOPMD
+    if (!isSet) {
+      throw new Exception(message + " Cannot build pipeline."); // NOPMD
+    }
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A properties object containing necessary values used for the
+   *        hazelcast jet kafka connection to read data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    this.readPropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A properties object containing necessary values used for the
+   *        hazelcast jet kafka connection to write data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    this.writePropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault,
+      final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    this.kafkaReadPropsForPipeline = new Uc3KafkaPropertiesBuilder()
+        .buildKafkaReadPropsFromEnv(bootstrapServersDefault, schemaRegistryUrlDefault);
+    this.readPropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    this.kafkaWritePropsForPipeline = new Uc3KafkaPropertiesBuilder()
+        .buildKafkaWritePropsFromEnv(bootstrapServersDefault);
+    this.writePropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    this.outputTopicSet = true;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    this.outputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder.
+   *
+   * @param windowSizeInSeconds the windowSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomWindowSizeInSeconds(// NOPMD
+      final int windowSizeInSeconds) {
+    this.windowSizeInSeconds = windowSizeInSeconds;
+    this.windowSizeInSecondsSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSizeInSeconds the default window size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setWindowSizeInSecondsFromEnv(// NOPMD
+      final String defaultWindowSizeInSeconds) {
+    // requireNonNullElse already returns a String; no cast needed.
+    final String windowSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_IN_SECONDS),
+        defaultWindowSizeInSeconds);
+    this.windowSizeInSeconds = Integer.parseInt(windowSizeInSeconds);
+    this.windowSizeInSecondsSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder.
+   *
+   * @param hoppingSizeInSeconds the hoppingSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomHoppingSizeInSeconds(// NOPMD
+      final int hoppingSizeInSeconds) {
+    this.hoppingSizeInSeconds = hoppingSizeInSeconds;
+    this.hoppingSizeInSecondsSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultHoppingSizeInSeconds the default hopping size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setHoppingSizeInSecondsFromEnv(// NOPMD
+      final String defaultHoppingSizeInSeconds) {
+    // requireNonNullElse already returns a String; no cast needed.
+    final String hoppingSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.HOPPING_SIZE_IN_SECONDS),
+        defaultHoppingSizeInSeconds);
+    this.hoppingSizeInSeconds = Integer.parseInt(hoppingSizeInSeconds);
+    this.hoppingSizeInSecondsSet = true;
+    return this;
+  }
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3KafkaPropertiesBuilder.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3KafkaPropertiesBuilder.java
new file mode 100644
index 000000000..b3600257d
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3KafkaPropertiesBuilder.java
@@ -0,0 +1,64 @@
+package theodolite.uc3.application;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+
+/**
+ * Builds read and write {@link Properties} objects containing the Kafka properties used for the
+ * UC3 benchmark of Hazelcast Jet.
+ */
+public class Uc3KafkaPropertiesBuilder {
+
+  /**
+   * Builds the Kafka read properties used for the UC3 benchmark pipeline, reading overrides from
+   * the environment where present.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties object containing the read-side values needed for a Hazelcast Jet
+   *         UC3 pipeline.
+   */
+  public Properties buildKafkaReadPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); // NOCS
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    // Use the class name consistently with the other (de)serializer entries; passing the
+    // Class object also works for Kafka, but mixing both styles is error-prone.
+    props.put("value.deserializer", KafkaAvroDeserializer.class.getCanonicalName());
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.setProperty("auto.offset.reset", "earliest");
+    return props;
+  }
+
+  /**
+   * Builds the Kafka write properties used for the UC3 benchmark pipeline, reading the bootstrap
+   * server from the environment where present.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @return A Kafka Properties object containing the write-side values needed for a Hazelcast Jet
+   *         UC3 pipeline.
+   */
+  public Properties buildKafkaWritePropsFromEnv(final String kafkaBootstrapServerDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); // NOCS
+    props.put("key.serializer", StringSerializer.class.getCanonicalName());
+    props.put("value.serializer", StringSerializer.class.getCanonicalName());
+    return props;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3PipelineBuilder.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3PipelineBuilder.java
new file mode 100644
index 000000000..938fdc7ab
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/Uc3PipelineBuilder.java
@@ -0,0 +1,94 @@
+package theodolite.uc3.application;
+
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.Map;
+import java.util.Properties;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import theodolite.uc3.application.uc3specifics.HourOfDayKey;
+import theodolite.uc3.application.uc3specifics.HoursOfDayKeyFactory;
+import theodolite.uc3.application.uc3specifics.StatsKeyFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a Hazelcast Jet {@link Pipeline} for UC3 which can be used for stream
+ * processing using Hazelcast Jet.
+ */
+public class Uc3PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * <p>The pipeline reads {@code ActivePowerRecord}s from Kafka, re-keys them by
+   * (sensorId, hourOfDay), averages the values per key over a sliding window, and writes the
+   * results both to a logger sink and back to Kafka.
+   *
+   * @param kafkaReadPropsForPipeline Properties object containing the necessary Kafka read
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties object containing the necessary Kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param hoppingSizeInSeconds The hop length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @param windowSizeInSeconds The window length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @return a Pipeline which can be used in a Hazelcast Jet instance to process data for UC3.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int hoppingSizeInSeconds, final int windowSizeInSeconds) {
+
+    // Build Pipeline for the History Service of UC3
+    final Pipeline pipe = Pipeline.create();
+    final StreamStage<Map.Entry<String, String>> mapProduct =
+        pipe
+            .readFrom(KafkaSources
+                .<String, ActivePowerRecord>kafka(
+                    kafkaReadPropsForPipeline, kafkaInputTopic))
+            // Use the native Kafka record timestamps for windowing (no extra lag allowed).
+            .withNativeTimestamps(0)
+            .setLocalParallelism(1)
+            // Map each record's timestamp to an hour of day and create a new key from the
+            // sensorId and that hour.
+            .map(record -> {
+              String sensorId = record.getValue().getIdentifier();
+              long timestamp = record.getValue().getTimestamp();
+              // NOTE(review): the epoch-millis timestamp is interpreted in the JVM's default
+              // time zone — confirm this matches the other UC3 implementations.
+              LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+                  TimeZone.getDefault().toZoneId());
+
+              // NOTE(review): a new HoursOfDayKeyFactory is allocated per record; it is
+              // stateless, so it could be hoisted if it is safe to capture in the lambda.
+              final StatsKeyFactory<HourOfDayKey> keyFactory = new HoursOfDayKeyFactory();
+              HourOfDayKey newKey = keyFactory.createKey(sensorId, dateTime);
+
+              return Map.entry(newKey, record.getValue());
+            })
+            // Group by the new (sensorId, hourOfDay) keys.
+            .groupingKey(newRecord -> newRecord.getKey())
+            // Sliding/hopping window (sizes converted from seconds to milliseconds).
+            .window(WindowDefinition.sliding(TimeUnit.SECONDS.toMillis(windowSizeInSeconds),
+                TimeUnit.SECONDS.toMillis(hoppingSizeInSeconds)))
+            // Average the power values of each (sensorId, hourOfDay) group.
+            .aggregate(
+                AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+            // Map to string pairs: "(sensorId;hourOfDay)" -> averaged value.
+            .map(agg -> {
+              String theValue = agg.getValue().toString();
+              String theKey = agg.getKey().toString();
+              return Map.entry(theKey, theValue);
+            });
+    // Sink 1: logger, for observability.
+    mapProduct.writeTo(Sinks.logger());
+    // Sink 2: write results back to Kafka for the final benchmark measurement.
+    mapProduct.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKey.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKey.java
new file mode 100644
index 000000000..a619a985b
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKey.java
@@ -0,0 +1,47 @@
+package theodolite.uc3.application.uc3specifics;
+
+import java.util.Objects;
+
+/** An immutable key consisting of an hour of the day and a sensor ID. */
+public class HourOfDayKey {
+
+  private final int hourOfDay;
+  private final String sensorId;
+
+  public HourOfDayKey(final int hourOfDay, final String sensorId) {
+    this.hourOfDay = hourOfDay;
+    this.sensorId = sensorId;
+  }
+
+  public int getHourOfDay() {
+    return this.hourOfDay;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  /** Returns {@code sensorId;hourOfDay} — the key format emitted to the output topic. */
+  @Override
+  public String toString() {
+    return this.sensorId + ";" + this.hourOfDay;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.hourOfDay, this.sensorId);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof HourOfDayKey) {
+      final HourOfDayKey other = (HourOfDayKey) obj;
+      // Compare the int directly; Objects.equals(int, int) would autobox both operands.
+      return this.hourOfDay == other.hourOfDay
+          && Objects.equals(this.sensorId, other.sensorId);
+    }
+    return false;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKeySerializer.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKeySerializer.java
new file mode 100644
index 000000000..cafe50875
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HourOfDayKeySerializer.java
@@ -0,0 +1,29 @@
+package theodolite.uc3.application.uc3specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A Hazelcast {@code StreamSerializer} for {@link HourOfDayKey}, required so the key can be
+ * shipped between Jet cluster members (i.e. to allow for parallelization).
+ */
+public class HourOfDayKeySerializer implements StreamSerializer<HourOfDayKey> {
+
+  // Type id registered for this serializer; must not collide with other custom serializers
+  // registered on the same Jet job.
+  private static final int TYPE_ID = 1;
+  
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  // Fields are written in the same order read() consumes them: hourOfDay, then sensorId.
+  @Override
+  public void write(final ObjectDataOutput out, final HourOfDayKey key) throws IOException {
+    out.writeInt(key.getHourOfDay());
+    out.writeUTF(key.getSensorId()); 
+  }
+
+  @Override
+  public HourOfDayKey read(final ObjectDataInput in) throws IOException {
+    return new HourOfDayKey(in.readInt(), in.readUTF());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HoursOfDayKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HoursOfDayKeyFactory.java
new file mode 100644
index 000000000..2f9ce999e
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/HoursOfDayKeyFactory.java
@@ -0,0 +1,19 @@
+package theodolite.uc3.application.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/** A factory that builds {@link HourOfDayKey} instances from a sensor ID and a date time. */
+public class HoursOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey> {
+
+  /** Combines the sensor ID with the hour component of the given date time into a key. */
+  @Override
+  public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) {
+    return new HourOfDayKey(dateTime.getHour(), sensorId);
+  }
+
+  /** Extracts the sensor ID back out of a previously created key. */
+  @Override
+  public String getSensorId(final HourOfDayKey key) {
+    return key.getSensorId();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/StatsKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/StatsKeyFactory.java
new file mode 100644
index 000000000..eda585c6e
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/application/uc3specifics/StatsKeyFactory.java
@@ -0,0 +1,17 @@
+package theodolite.uc3.application.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/**
+ * Factory interface for creating a stats key from a sensor id and a {@link LocalDateTime} object
+ * and vice versa.
+ *
+ * @param <T> Type of the key
+ */
+public interface StatsKeyFactory<T> {
+
+  /** Creates a key of type {@code T} from the given sensor id and date time. */
+  T createKey(String sensorId, LocalDateTime dateTime);
+  
+  /** Extracts the sensor id back out of the given key. */
+  String getSensorId(T key);
+  
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ClusterConfig.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ClusterConfig.java
new file mode 100644
index 000000000..923749478
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ClusterConfig.java
@@ -0,0 +1,76 @@
+package theodolite.uc3.applicationold;
+
+/**
+ * Configuration of a Hazelcast cluster connection: either a TCP/IP bootstrap server or a
+ * Kubernetes DNS name, plus port and cluster-name settings.
+ *
+ * <p>NOTE(review): the default cluster name prefix still says "theodolite-load-generation";
+ * this class appears to be copied from the load generator — confirm the prefix is intended here.
+ */
+public final class ClusterConfig {
+
+  private static final int PORT_DEFAULT = 5701;
+  private static final String CLUSTER_NAME_PREFIX_DEFAULT = "theodolite-load-generation";
+
+  // Exactly one of bootstrapServer / kubernetesDnsName is non-null, depending on which
+  // factory method created this config.
+  private final String bootstrapServer;
+  private final String kubernetesDnsName;
+  private int port = PORT_DEFAULT;
+  private boolean portAutoIncrement = true;
+  private String clusterNamePrefix = CLUSTER_NAME_PREFIX_DEFAULT;
+
+  /**
+   * Create a new {@link ClusterConfig} with the given parameter values. Private; use the
+   * {@code fromBootstrapServer} or {@code fromKubernetesDnsName} factory methods instead.
+   */
+  private ClusterConfig(final String bootstrapServer, final String kubernetesDnsName) {
+    this.bootstrapServer = bootstrapServer;
+    this.kubernetesDnsName = kubernetesDnsName;
+  }
+
+  public boolean hasBootstrapServer() {
+    return this.bootstrapServer != null;
+  }
+
+  public String getBootstrapServer() {
+    return this.bootstrapServer;
+  }
+
+  public boolean hasKubernetesDnsName() {
+    return this.kubernetesDnsName != null;
+  }
+
+  public String getKubernetesDnsName() {
+    return this.kubernetesDnsName;
+  }
+
+  public int getPort() {
+    return this.port;
+  }
+
+  public boolean isPortAutoIncrement() {
+    return this.portAutoIncrement;
+  }
+
+  public ClusterConfig setPortAutoIncrement(final boolean portAutoIncrement) { // NOPMD
+    this.portAutoIncrement = portAutoIncrement;
+    return this;
+  }
+
+  public ClusterConfig setPort(final int port) { // NOPMD
+    this.port = port;
+    return this;
+  }
+
+  public String getClusterNamePrefix() {
+    return this.clusterNamePrefix;
+  }
+
+  public ClusterConfig setClusterNamePrefix(final String clusterNamePrefix) { // NOPMD
+    this.clusterNamePrefix = clusterNamePrefix;
+    return this;
+  }
+
+  /** Creates a {@link ClusterConfig} that joins the cluster via a TCP/IP bootstrap server. */
+  public static ClusterConfig fromBootstrapServer(final String bootstrapServer) {
+    return new ClusterConfig(bootstrapServer, null);
+  }
+
+  /** Creates a {@link ClusterConfig} that discovers members via a Kubernetes DNS name. */
+  public static ClusterConfig fromKubernetesDnsName(final String kubernetesDnsName) {
+    return new ClusterConfig(null, kubernetesDnsName);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ConfigurationKeys.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ConfigurationKeys.java
new file mode 100644
index 000000000..020213ad7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/ConfigurationKeys.java
@@ -0,0 +1,48 @@
+package theodolite.uc3.applicationold;
+
+/**
+ * Keys to access configuration parameters (names of the environment variables read by the
+ * services in this module).
+ */
+public final class ConfigurationKeys {
+
+  // Hazelcast cluster connection
+  public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
+
+  public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
+
+  public static final String PORT = "PORT";
+
+  public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
+
+  public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";
+
+  // Load generation parameters
+  public static final String NUM_SENSORS = "NUM_SENSORS";
+
+  public static final String PERIOD_MS = "PERIOD_MS";
+  
+  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";
+  
+  // UC3 window parameters
+  public static final String WINDOW_SIZE_IN_SECONDS = "WINDOW_SIZE_IN_SECONDS";
+  
+  public static final String HOPPING_SIZE_IN_SECONDS = "HOPPING_SIZE_IN_SECONDS";
+
+  public static final String VALUE = "VALUE";
+
+  public static final String THREADS = "THREADS";
+
+  // Kafka connection and tuning
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
+
+  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
+
+  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";
+  
+  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";
+
+  public static final String KAFKA_BATCH_SIZE = "KAFKA_BATCH_SIZE";
+
+  public static final String KAFKA_LINGER_MS = "KAFKA_LINGER_MS";
+
+  public static final String KAFKA_BUFFER_MEMORY = "KAFKA_BUFFER_MEMORY";
+
+  // Utility class; no instances.
+  private ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/HistoryService.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/HistoryService.java
new file mode 100644
index 000000000..afa08c576
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/theodolite/uc3/applicationold/HistoryService.java
@@ -0,0 +1,303 @@
+package theodolite.uc3.applicationold;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.JoinConfig;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.time.*; // NOCS
+//import java.time.format.DateTimeFormatter;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import theodolite.uc3.application.uc3specifics.HourOfDayKey;
+import theodolite.uc3.application.uc3specifics.HourOfDayKeySerializer;
+import theodolite.uc3.application.uc3specifics.HoursOfDayKeyFactory;
+import theodolite.uc3.application.uc3specifics.StatsKeyFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // General Information
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  private static final String KAFKA_BSERVERS_DEFAULT = "localhost:19092";
+  // UC3 specific
+  private static final String WINDOW_SIZE_IN_SECONDS_DEFAULT = "50";
+  private static final String HOPSIZE_IN_SEC_DEFAULT = "1";
+
+  // Information per History Service
+  private ClusterConfig clusterConfig;
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  // UC3 specific
+  private int windowSizeInSeconds;
+  private int hoppingSizeInSeconds;
+
+  /**
+   * Entrypoint for UC3 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    HistoryService.loadHistoryService().run();
+  }
+
+  /** Build a history service object to run, configured entirely from environment variables. */
+  public static HistoryService loadHistoryService() {
+    final String bootstrapServer = System.getenv(ConfigurationKeys.BOOTSTRAP_SERVER);
+    final String kubernetesDnsName = System.getenv(ConfigurationKeys.KUBERNETES_DNS_NAME);
+
+    // An explicit bootstrap server takes precedence over a Kubernetes DNS name.
+    ClusterConfig clusterConfig;
+    if (bootstrapServer != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServer);
+      LOGGER.info("Use bootstrap server '{}'.", bootstrapServer);
+    } else if (kubernetesDnsName != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromKubernetesDnsName(kubernetesDnsName);
+      LOGGER.info("Use Kubernetes DNS name '{}'.", kubernetesDnsName);
+    } else {
+      clusterConfig = ClusterConfig.fromBootstrapServer(BOOTSTRAP_SERVER_DEFAULT);
+      LOGGER.info(// NOPMD
+          "Neither a bootstrap server nor a Kubernetes DNS name was provided. "
+          + "Use default bootstrap server '{}'.",
+          BOOTSTRAP_SERVER_DEFAULT);
+    }
+
+    final String port = System.getenv(ConfigurationKeys.PORT);
+    if (port != null) {
+      clusterConfig.setPort(Integer.parseInt(port));
+    }
+
+    final String portAutoIncrement = System.getenv(ConfigurationKeys.PORT_AUTO_INCREMENT);
+    if (portAutoIncrement != null) {
+      clusterConfig.setPortAutoIncrement(Boolean.parseBoolean(portAutoIncrement));
+    }
+
+    final String clusterNamePrefix = System.getenv(ConfigurationKeys.CLUSTER_NAME_PREFIX);
+    if (clusterNamePrefix != null) {
+      clusterConfig.setClusterNamePrefix(clusterNamePrefix);
+    }
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        KAFKA_BSERVERS_DEFAULT);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        SCHEMA_REGISTRY_URL_DEFAULT);
+    final Properties kafkaReadPropsForPipeline =
+        buildKafkaReadProps(kafkaBootstrapServers, schemaRegistryUrl);
+    final Properties kafkaWritePropsForPipeline =
+        buildKafkaWriteProps(kafkaBootstrapServers);
+
+    final String kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        KAFKA_INPUT_TOPIC_DEFAULT);
+
+    final String kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        KAFKA_OUTPUT_TOPIC_DEFAULT);
+
+    final String windowSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_IN_SECONDS),
+        WINDOW_SIZE_IN_SECONDS_DEFAULT);
+    final int windowSizeInSecondsNumber = Integer.parseInt(windowSizeInSeconds);
+
+    final String hoppingSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.HOPPING_SIZE_IN_SECONDS),
+        HOPSIZE_IN_SEC_DEFAULT);
+    final int hoppingSizeInSecondsNumber = Integer.parseInt(hoppingSizeInSeconds);
+
+    return new HistoryService()
+        .setClusterConfig(clusterConfig)
+        .setKafkaReadPropertiesForPipeline(kafkaReadPropsForPipeline)
+        .setKafkaWritePropertiesForPipeline(kafkaWritePropsForPipeline)
+        .setKafkaInputTopic(kafkaInputTopic)
+        .setKafkaOutputTopic(kafkaOutputTopic)
+        .setWindowSizeInSeconds(windowSizeInSecondsNumber)
+        .setHoppingSizeInSeconds(hoppingSizeInSecondsNumber);
+  }
+
+  /** Set Cluster Config when creating History Service. */
+  private HistoryService setClusterConfig(final ClusterConfig clusterConfig) { // NOPMD
+    this.clusterConfig = clusterConfig;
+    return this;
+  }
+
+  /** Set Pipeline Kafka Read Properties. */
+  private HistoryService setKafkaReadPropertiesForPipeline(// NOPMD
+      final Properties kafkaReadPropsForPipeline) {
+    this.kafkaReadPropsForPipeline = kafkaReadPropsForPipeline;
+    return this;
+  }
+
+  /** Set Pipeline Kafka Write Properties. */
+  private HistoryService setKafkaWritePropertiesForPipeline(// NOPMD
+      final Properties kafkaWritePropsForPipeline) {
+    this.kafkaWritePropsForPipeline = kafkaWritePropsForPipeline;
+    return this;
+  }
+
+  /** Set Kafka Input topic used to build the pipeline. */
+  private HistoryService setKafkaInputTopic(final String kafkaInputTopic) { // NOPMD
+    this.kafkaInputTopic = kafkaInputTopic;
+    return this;
+  }
+
+  /** Set Kafka Output topic used to build the pipeline. */
+  private HistoryService setKafkaOutputTopic(final String kafkaOutputTopic) { // NOPMD
+    this.kafkaOutputTopic = kafkaOutputTopic;
+    return this;
+  }
+
+  /** Set the window size used in this history service (given in seconds). */
+  private HistoryService setWindowSizeInSeconds(final int windowSizeInSeconds) { // NOPMD
+    this.windowSizeInSeconds = windowSizeInSeconds;
+    return this;
+  }
+
+  /** Set the hopping size used in this history service (given in seconds). */
+  private HistoryService setHoppingSizeInSeconds(final int hoppingSizeInSeconds) { // NOPMD
+    this.hoppingSizeInSeconds = hoppingSizeInSeconds;
+    return this;
+  }
+
+  /**
+   * Defines kafka properties used to fetch data from kafka using a Hazelcast Jet pipeline.
+   *
+   * @return properties used to fetch data from kafka using a Hazelcast Jet pipeline.
+   */
+  private static Properties buildKafkaReadProps(final String kafkaBootstrapServer,
+      final String schemaRegistryUrl) {
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServer); // NOCS
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", KafkaAvroDeserializer.class);
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.setProperty("auto.offset.reset", "earliest");
+    return props;
+  }
+
+  /**
+   * Defines kafka properties used to write data to kafka using a Hazelcast Jet pipeline.
+   *
+   * @return properties used to write data to kafka using a Hazelcast Jet pipeline.
+   */
+  private static Properties buildKafkaWriteProps(final String kafkaBootstrapServer) {
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServer); // NOCS
+    props.put("key.serializer", StringSerializer.class.getCanonicalName());
+    props.put("value.serializer", StringSerializer.class.getCanonicalName());
+    return props;
+  }
+
+  /**
+   * Start the UC3 service.
+   */
+  public void run() {
+    Objects.requireNonNull(this.clusterConfig, "No cluster config set.");
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Build a pipeline and start a Hazelcast Jet Instance and add a job that uses the built pipeline.
+   */
+  private void createHazelcastJetApplication() {
+
+    // Build Pipeline for the History Service of UC3
+    final Pipeline pipeline = Pipeline.create();
+    final StreamStage<Map.Entry<String, String>> mapProduct =
+        pipeline
+            .readFrom(KafkaSources
+                .<String, ActivePowerRecord>kafka(
+                    this.kafkaReadPropsForPipeline, this.kafkaInputTopic))
+            // Use the native Kafka record timestamps for windowing.
+            .withNativeTimestamps(0)
+            // Map each record's timestamp to an hour of day and create a new key from the
+            // sensorId and that hour.
+            .map(record -> {
+              String sensorId = record.getValue().getIdentifier();
+              long timestamp = record.getValue().getTimestamp();
+              LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+                  TimeZone.getDefault().toZoneId());
+
+              final StatsKeyFactory<HourOfDayKey> keyFactory = new HoursOfDayKeyFactory();
+              HourOfDayKey newKey = keyFactory.createKey(sensorId, dateTime);
+
+              return Map.entry(newKey, record.getValue());
+            })
+            // Group by the new (sensorId, hourOfDay) keys.
+            .groupingKey(newRecord -> newRecord.getKey())
+            // Sliding/Hopping Window
+            .window(WindowDefinition.sliding(TimeUnit.SECONDS.toMillis(this.windowSizeInSeconds),
+                TimeUnit.SECONDS.toMillis(this.hoppingSizeInSeconds)))
+            // Average the power values of each (sensorId, hourOfDay) group.
+            .aggregate(
+                AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+            // Map to string pairs: (sensorId;hourOfDay) -> averaged watt value.
+            .map(agg -> {
+              String theValue = agg.getValue().toString();
+              String theKey = agg.getKey().toString();
+              return Map.entry(theKey, theValue);
+            });
+    // Add Sink1: Logger
+    mapProduct.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    mapProduct.writeTo(KafkaSinks.<String, String>kafka(
+        this.kafkaWritePropsForPipeline, this.kafkaOutputTopic));
+
+    // Set network config for this hazelcast jet instance
+    // Create Hazelcast Config
+    final Config config = new Config().setClusterName(this.clusterConfig.getClusterNamePrefix());
+    final JoinConfig joinConfig = config.getNetworkConfig()
+        .setPort(this.clusterConfig.getPort())
+        .setPortAutoIncrement(this.clusterConfig.isPortAutoIncrement())
+        .getJoin();
+    // Set either Bootstrap Server Member or establish Kubernetes Connection
+    joinConfig.getMulticastConfig().setEnabled(false);
+    if (this.clusterConfig.hasBootstrapServer()) {
+      joinConfig.getTcpIpConfig().addMember(this.clusterConfig.getBootstrapServer());
+    } else if (this.clusterConfig.hasKubernetesDnsName()) {
+      joinConfig.getKubernetesConfig()
+        .setEnabled(true)
+        .setProperty(HZ_KUBERNETES_SERVICE_DNS_KEY, this.clusterConfig.getKubernetesDnsName());
+    }
+
+    // Create Hazelcast Jet Instance
+    // Add config for jet instance, config for the job and add pipeline as the job
+    final JetInstance jet = Jet.newJetInstance();
+    jet.getConfig().setHazelcastConfig(config);
+    final JobConfig pipelineConfig = new JobConfig()
+        .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class)
+        .setName("uc3-hazelcastjet");
+    jet.newJobIfAbsent(pipeline, pipelineConfig).join();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 000000000..e3371cc87
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc3-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
new file mode 100644
index 000000000..1b5ce2202
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+# This is the UC4 image: unpack the UC4 distribution (the previous paths referenced
+# uc3-hazelcastjet, a copy-paste error from the UC3 Dockerfile).
+ADD build/distributions/uc4-hazelcastjet.tar /
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc4-hazelcastjet/bin/uc4-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/build.gradle b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
new file mode 100644
index 000000000..7fd51cca2
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
@@ -0,0 +1,6 @@
+plugins {
+  // shared Theodolite Hazelcast Jet build conventions
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "theodolite.uc4.application.HistoryService"
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/HistoryService.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/HistoryService.java
new file mode 100644
index 000000000..7463fa794
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/HistoryService.java
@@ -0,0 +1,68 @@
+package theodolite.uc4.application;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+  // General Information (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  private static final String KAFKA_BSERVER_DEFAULT = "localhost:19092";
+  // UC4 specific (default)
+  private static final String KAFKA_CONFIG_TOPIC_DEFAULT = "configuration";
+  private static final String KAFKA_FEEDBACK_TOPIC_DEFAULT = "aggregation-feedback";
+  private static final String WINDOW_SIZE_DEFAULT = "5000";
+
+  // -- (default) job name for this history serivce
+  private static final String JOB_NAME = "uc4-hazelcastjet";
+
+  /**
+   * Entrypoint for UC4 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc4HistoryService = new HistoryService();
+    try {
+      uc4HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      e.printStackTrace(); // NOPMD
+      System.out.println("An Exception occured. "// NOPMD
+          + "No history service is deployed! ABORT MISSION!");
+    }
+  }
+
+  /**
+   * Start a UC4 service.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC4 using the Uc1HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc4HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BSERVER_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setWritePropertiesFromEnv(KAFKA_BSERVER_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setKafkaConfigurationTopicFromEnv(KAFKA_CONFIG_TOPIC_DEFAULT)
+        .setKafkaFeedbackTopicFromEnv(KAFKA_FEEDBACK_TOPIC_DEFAULT)
+        .setWindowSizeFromEnv(WINDOW_SIZE_DEFAULT)
+        .buildUc4JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .buildUc4Pipeline()        
+        .runUc4Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4HazelcastJetFactory.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4HazelcastJetFactory.java
new file mode 100644
index 000000000..598e8c2c3
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4HazelcastJetFactory.java
@@ -0,0 +1,386 @@
+package theodolite.uc4.application;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import java.util.Objects;
+import java.util.Properties;
+import org.slf4j.Logger;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+import theodolite.commons.hazelcastjet.JetInstanceBuilder;
+import theodolite.uc4.application.uc4specifics.SensorGroupKey;
+import theodolite.uc4.application.uc4specifics.SensorGroupKeySerializer;
+import theodolite.uc4.application.uc4specifics.ValueGroup;
+import theodolite.uc4.application.uc4specifics.ValueGroupSerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC4
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties and set the input, output, and configuration topic. This can be
+ * done using internal functions of this factory. Outside data only refers to custom values or
+ * default values in case data of the environment cannot be fetched.
+ */
+public class Uc4HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaInputReadPropsForPipeline;
+  private Properties kafkaConfigReadPropsForPipeline;
+  private Properties kafkaAggregationReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc4JetInstance;
+  private Pipeline uc4JetPipeline;
+  // UC4 specific
+  private String kafkaConfigurationTopic;
+  private String kafkaFeedbackTopic;
+  private int windowSize;
+
+  // Checkflags; each guards the corresponding setter having been called before use
+  private boolean readPropertiesSet;
+  private boolean writePropertiesSet;
+  private boolean inputTopicSet;
+  private boolean outputTopicSet;
+  private boolean pipelineSet;
+  private boolean jetInstanceSet;
+  private boolean kafkaConfigurationTopicSet;
+  private boolean kafkaFeedbackTopicSet;
+  private boolean windowSizeSet;
+
+  /**
+   * Create a new Hazelcast Jet Factory for UC4.
+   */
+  public Uc4HazelcastJetFactory() {
+    this.readPropertiesSet = false;
+    this.writePropertiesSet = false;
+    this.inputTopicSet = false;
+    this.outputTopicSet = false;
+    this.pipelineSet = false;
+    this.jetInstanceSet = false;
+    this.kafkaConfigurationTopicSet = false;
+    this.kafkaFeedbackTopicSet = false;
+    this.windowSizeSet = false;
+  }
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be started.
+   */
+  public void runUc4Job(final String jobName) throws Exception { // NOPMD
+    if (this.jetInstanceSet) {
+      if (this.pipelineSet) {
+
+        // Adds the job name and joins a job to the JetInstance defined in this factory
+        final JobConfig jobConfig = new JobConfig()
+            .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+            .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+            .setName(jobName);
+        this.uc4JetInstance.newJobIfAbsent(this.uc4JetPipeline, jobConfig).join();
+
+      } else {
+        throw new Exception(// NOPMD
+            "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC4.");
+      }
+    } else {
+      throw new Exception("Jet Instance is not set! " // NOPMD
+          + "Cannot start a hazelcast jet job for UC4.");
+    }
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc4HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc4HazelcastJetFactory buildUc4JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc4JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    this.jetInstanceSet = true;
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc4HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc4HazelcastJetFactory buildUc4Pipeline() throws Exception { // NOPMD
+    // Check for set properties and set input topic
+    if (this.readPropertiesSet) {
+      if (this.writePropertiesSet) {
+        if (this.inputTopicSet) {
+          if (this.outputTopicSet) {
+            if (this.kafkaConfigurationTopicSet) {
+              if (this.windowSizeSet) {
+                if (this.jetInstanceSet) {
+                  if (this.kafkaFeedbackTopicSet) {
+                    // Build Pipeline Using the pipelineBuilder
+                    final Uc4PipelineBuilderNew pipeBuilder = new Uc4PipelineBuilderNew();
+                    this.uc4JetPipeline =
+                        pipeBuilder.build(this.kafkaInputReadPropsForPipeline,
+                            this.kafkaConfigReadPropsForPipeline,
+                            this.kafkaAggregationReadPropsForPipeline,
+                            this.kafkaWritePropsForPipeline,
+                            this.kafkaInputTopic, this.kafkaOutputTopic,
+                            this.kafkaConfigurationTopic,
+                            this.kafkaFeedbackTopic,
+                            this.windowSize);
+                    this.pipelineSet = true;
+                    // Return Uc4HazelcastJetBuilder factory
+                    return this;
+                  } else {
+                    throw new Exception("Feedback topic not set! " // NOPMD
+                        + "Cannot build pipeline."); // NOCS // NOPMD
+                  }
+                } else {
+                  throw new Exception("Jet Instance not set! " // NOPMD
+                      + "Cannot build pipeline."); // NOCS // NOPMD
+                }
+              } else {
+                throw new Exception("window size for pipeline not set! " // NOPMD
+                    + "Cannot build pipeline."); // NOCS // NOPMD
+              }
+            } else {
+              throw new Exception("configuratin topic for pipeline not set! " // NOPMD
+                  + "Cannot build pipeline."); // NOCS // NOPMD
+            }
+          } else {
+            throw new Exception("kafka output topic for pipeline not set! " // NOPMD
+                + "Cannot build pipeline."); // NOCS // NOPMD
+          }
+        } else {
+          throw new Exception("Kafka input topic for pipeline not set! " // NOPMD
+              + "Cannot build pipeline."); // NOCS // NOPMD
+        }
+      } else {
+        throw new Exception("Kafka Write Properties for pipeline not set! " // NOPMD
+            + "Cannot build pipeline."); // NOCS // NOPMD
+      }
+    } else {
+      throw new Exception("Kafka Read Properties for pipeline not set! " // NOPMD
+          + "Cannot build pipeline."); // NOCS // NOPMD
+    }
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc4HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault,
+      final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final Uc4KafkaPropertiesBuilder propsBuilder = new Uc4KafkaPropertiesBuilder();
+    final Properties kafkaInputReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault);
+    final Properties kafkaConfigReadProps =
+        propsBuilder.buildKafkaConfigReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault);
+    final Properties kafkaAggregationReadProps =
+        propsBuilder.buildKafkaAggregationReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault);
+    this.kafkaInputReadPropsForPipeline = kafkaInputReadProps;
+    this.kafkaConfigReadPropsForPipeline = kafkaConfigReadProps;
+    this.kafkaAggregationReadPropsForPipeline = kafkaAggregationReadProps;
+    this.readPropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc4HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final Uc4KafkaPropertiesBuilder propsBuilder = new Uc4KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault);
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    this.writePropertiesSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    this.outputTopicSet = true;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    this.inputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    this.outputTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder.
+   *
+   * @param windowSize the window size to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setCustomWindowSize(// NOPMD
+      final int windowSize) {
+    this.windowSize = windowSize;
+    this.windowSizeSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSize the default window size to be used for this pipeline when none is set
+   *        in the environment.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setWindowSizeFromEnv(// NOPMD
+      final String defaultWindowSize) {
+    final String windowSize = (String) Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_UC4),
+        defaultWindowSize);
+    final int windowSizeNumber = Integer.parseInt(windowSize);
+    this.windowSize = windowSizeNumber;
+    this.windowSizeSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder.
+   *
+   * @param kafkaConfigurationTopic the configuration topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set configuration topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaConfigurationTopic(// NOPMD
+      final String kafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = kafkaConfigurationTopic;
+    this.kafkaConfigurationTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaConfigurationTopic the default configuration topic to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaConfigurationTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaConfigurationTopicFromEnv(// NOPMD
+      final String defaultKafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = (String) Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC),
+        defaultKafkaConfigurationTopic);
+    this.kafkaConfigurationTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder.
+   *
+   * @param kafkaFeedbackTopic the Feedback topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set Feedback topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaFeedbackTopic(// NOPMD
+      final String kafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = kafkaFeedbackTopic;
+    this.kafkaFeedbackTopicSet = true;
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaFeedbackTopic the default Feedback topic to be used for this pipeline when
+   *        none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaFeedbackTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaFeedbackTopicFromEnv(// NOPMD
+      final String defaultKafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = (String) Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC),
+        defaultKafkaFeedbackTopic);
+    this.kafkaFeedbackTopicSet = true;
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4KafkaPropertiesBuilder.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4KafkaPropertiesBuilder.java
new file mode 100644
index 000000000..0cca80b58
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4KafkaPropertiesBuilder.java
@@ -0,0 +1,124 @@
+package theodolite.uc4.application;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.serialization.DoubleDeserializer;
+import org.apache.kafka.common.serialization.DoubleSerializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import theodolite.commons.hazelcastjet.ConfigurationKeys;
+import theodolite.uc4.application.uc4specifics.EventDeserializer;
+import titan.ccp.configuration.events.EventSerde;
+
+/**
+ * Builds a read and write Properties objects containing the needed kafka properties used for the
+ * UC4 benchmark of Hazelcast Jet.
+ */
+public class Uc4KafkaPropertiesBuilder {
+
+  /**
+   * Builds the Kafka read properties for the input topic of the UC4 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC4
+   *         Pipeline.
+   */
+  public Properties buildKafkaInputReadPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", KafkaAvroDeserializer.class.getCanonicalName());
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.put("auto.offset.reset", "latest");
+    return props;
+  }
+
+  /**
+   * Builds the Kafka read properties for the aggregation feedback topic of the UC4 pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC4
+   *         Pipeline.
+   */
+  public Properties buildKafkaAggregationReadPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", DoubleDeserializer.class.getCanonicalName());
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    return props;
+  }
+
+  /**
+   * Builds the Kafka read properties for the configuration topic of the UC4 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC4
+   *         Pipeline.
+   */
+  public Properties buildKafkaConfigReadPropsFromEnv(final String kafkaBootstrapServerDefault,
+      final String schemaRegistryUrlDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.deserializer", EventDeserializer.class.getCanonicalName());
+    props.put("value.deserializer", StringDeserializer.class.getCanonicalName());
+    props.put("specific.avro.reader", true);
+    props.put("schema.registry.url", schemaRegistryUrl);
+    props.put("auto.offset.reset", "earliest");
+    return props;
+  }
+
+  /**
+   * Builds the Kafka write properties used for the UC4 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC4
+   *         Pipeline.
+   */
+  public Properties buildKafkaWritePropsFromEnv(final String kafkaBootstrapServerDefault) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+
+    final Properties props = new Properties();
+    props.put("bootstrap.servers", kafkaBootstrapServers); //NOCS
+    props.put("key.serializer", StringSerializer.class.getCanonicalName());
+    props.put("value.serializer", DoubleSerializer.class.getCanonicalName());
+    return props;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilder.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilder.java
new file mode 100644
index 000000000..c4a9d77f1
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilder.java
@@ -0,0 +1,180 @@
+package theodolite.uc4.application;
+
+import com.hazelcast.function.FunctionEx;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.Traverser;
+import com.hazelcast.jet.Traversers;
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.datamodel.WindowResult;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.JoinClause;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSourceStage;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.StreamStageWithKey;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import com.hazelcast.map.IMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
+import org.apache.kafka.streams.kstream.KTable;
+import theodolite.uc4.application.uc4specifics.ValueGroup;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+// NOTE(review): work-in-progress builder; superseded by Uc4PipelineBuilderNew — confirm before use.
+public class Uc4PipelineBuilder {
+
+  private Pipeline pipe = Pipeline.create();
+
+  // Data shared between build() and the private helper methods
+  private String kafkaInputTopic;
+  private String kafkaConfigurationTopic;
+  private Properties kafkaReadPropsForPipeline;
+  private JetInstance uc4JetInstance;
+
+
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param windowSize The window size in milliseconds of the tumbling window used in the "last
+   *        values" aggregation of this pipeline.
+   * @return a Pipeline which can be used in a Hazelcast Jet Instance to process data
+   *         for UC4.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final String kafkaConfigurationTopic,
+      final int windowSize,
+      JetInstance jet) {
+
+
+    this.uc4JetInstance = jet;
+
+    ///////////////////////
+    // 1. Configuration Map
+    // this.kafkaConfigurationTopic = kafkaConfigurationTopic;
+    // this.kafkaReadPropsForPipeline = kafkaReadPropsForPipeline;
+    // final IMap<String, Set<String>> parentSensorTable = this.buildParentSensorMap();
+    ///////////////////////
+    StreamStage<Entry<String, Set<String>>> configurationStream = null;
+
+    ////////////////
+    // 2. Input Map
+    // this.kafkaInputTopic = kafkaInputTopic;
+    // final IMap<String, ActivePowerRecord> inputTable = this.buildInputTable();
+    ////////////////
+    StreamStage<Entry<String, ActivePowerRecord>> inputStream = this.pipe
+        .readFrom(KafkaSources.<String, ActivePowerRecord>kafka(
+            kafkaReadPropsForPipeline, kafkaInputTopic))
+        .withNativeTimestamps(0);
+
+    StreamStage<Entry<String, Double>> reducedInputStream = inputStream
+        .map(inputEntry -> {
+          return Map.entry(inputEntry.getValue().getIdentifier(),
+              inputEntry.getValue().getValueInW());
+        });
+
+    //////////////////////////////////////////////////////////
+    // 3. Last Value Table from Input and Parent Sensor Table
+    // final IMap<WindowResult<SensorParentKey>, ActivePowerRecord> lastValueTable =
+    //     this.buildLastValueTable(parentSensorTable, inputTable);
+    ////////////////////////////////////////////////////////////
+    //StreamStage<Entry<String,ValueGroup>> jointStream =
+    //    inputStream.hashJoin(configurationStream,
+    //        JoinClause.joinMapEntries(leftKeyFn),
+    //        mapToOutputFn); // TODO hmm, how to join?
+
+    // 4. Aggregation Stream
+    //final IMap<WindowResult<String>, AggregatedActivePowerRecord> aggregations =
+    //    this.buildAggregationStream(lastValueTable);
+
+    return pipe;
+  }
+
+  /**
+   * Uses a given configuration topic of kafka to get data which represents a table of sensor or
+   * group identifiers which are mapped to a set of groups and returns an IMap containing these
+   * entries.
+   *
+   * TODO WORK IN PROGRESS - QUESTIONS REGARDING THE LAST STEPS
+   *
+   * @return Returns an IMap<String, Set<String>> Object containing sensor/group identifiers and
+   *         their corresponding groups/parents.
+   */
+  private IMap<String, Set<String>> buildParentSensorMap() {
+    // Read the raw configuration stream
+    StreamStage<Entry<Event, String>> configurationStream = this.pipe
+        .readFrom(KafkaSources.<Event, String>kafka(
+            kafkaReadPropsForPipeline, kafkaConfigurationTopic))
+        .withNativeTimestamps(0);
+
+    // Filter certain values out
+    StreamStage<Entry<Event, String>> filteredConfigurationStream = configurationStream
+        .filter(entry -> entry.getKey() == Event.SENSOR_REGISTRY_CHANGED
+            || entry.getKey() == Event.SENSOR_REGISTRY_STATUS);
+
+    // Map configuration String to Sensor Registry
+    StreamStage<Entry<Event, SensorRegistry>> mapped = filteredConfigurationStream
+        .map(inputEntry -> Map.entry(inputEntry.getKey(),
+            SensorRegistry.fromJson(inputEntry.getValue())));
+
+
+    // Flat Transform TODO Needs Traversers thingy? (null placeholder would NPE if executed)
+    StreamStage<Entry<String, Optional<Set<String>>>> flatMapped = mapped.flatMap(null);
+
+    // Group by Key TODO
+    StreamStageWithKey<Entry<String, Optional<Set<String>>>, Object> grouped =
+        flatMapped.groupingKey(entry -> entry.getKey());
+
+    // Aggregate TODO (null placeholder would NPE if executed)
+    IMap<String, Set<String>> aggregated =
+        (IMap<String, Set<String>>) grouped.rollingAggregate(null);
+
+
+    // Return
+    return aggregated;
+  }
+
+  /**
+   * Receives an input stream with sensor ID's and values and returns a filled IMap with such
+   * values.
+   *
+   * TODO WORK IN PROGRESS - QUESTIONS
+   *
+   * @return An IMap<String,ActivePowerRecord> Object with entries
+   */
+  private IMap<String, ActivePowerRecord> buildInputTable() {
+
+    final IMap<String, ActivePowerRecord> inputTable = uc4JetInstance.getMap("inputTable");
+
+    // Read Input Stream
+    // TODO MERGE STEP WITH AGGREGATION RESULTS SKIPPED AT THE MOMENT
+    StreamStage<Entry<String, ActivePowerRecord>> inputStream = this.pipe
+        .readFrom(KafkaSources.<String, ActivePowerRecord>kafka(
+            kafkaReadPropsForPipeline, kafkaInputTopic))
+        .withNativeTimestamps(0)
+        .map(entry -> {
+          inputTable.put(entry.getKey(), entry.getValue());
+          return entry;
+        });
+
+    return inputTable;
+
+  }
+
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilderNew.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilderNew.java
new file mode 100644
index 000000000..b4ed5c12a
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/Uc4PipelineBuilderNew.java
@@ -0,0 +1,278 @@
+package theodolite.uc4.application;
+
+import com.hazelcast.function.BiFunctionEx;
+import com.hazelcast.function.SupplierEx;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.Traverser;
+import com.hazelcast.jet.Traversers;
+import com.hazelcast.jet.Util;
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.JournalInitialPosition;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.Sources;
+import com.hazelcast.jet.pipeline.StageWithWindow;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.StreamStageWithKey;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import com.hazelcast.map.IMap;
+import java.util.Properties;
+import java.util.Set;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import theodolite.uc4.application.uc4specifics.ChildParentsTransformer;
+import theodolite.uc4.application.uc4specifics.SensorGroupKey;
+import theodolite.uc4.application.uc4specifics.ValueGroup;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+public class Uc4PipelineBuilderNew {
+
+  @SuppressWarnings("unchecked")
+  public Pipeline build(final Properties kafkaInputReadPropsForPipeline,
+      final Properties kafkaConfigReadPropsForPipeline,
+      final Properties kafkaAggregationReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, 
+      final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final String kafkaConfigurationTopic,
+      final String kafkaFeedbackTopic,
+      final int windowSize) {
+
+    //////////////////////////////////
+    // The pipeline for this Use Case
+    Pipeline uc4Pipeline = Pipeline.create();
+    
+    //System.out.println("DEBUG: Window Size: " + windowSize);
+
+    //////////////////////////////////
+    // (1) Configuration Stream
+    StreamStage<Entry<Event, SensorRegistry>> configurationStream = uc4Pipeline
+        .readFrom(KafkaSources.<Event, String>kafka(
+            kafkaConfigReadPropsForPipeline, kafkaConfigurationTopic))
+        .withNativeTimestamps(0)
+        .map(data -> {
+
+          // DEBUG
+          // System.out.println("D E B U G: Got a configuration Stream Element!");
+          // System.out.println("Event: " + data.getKey().toString() + "; Sensor Registry: " +
+          // data.getValue().toString());
+
+          return data;
+
+        })
+        .filter(entry -> entry.getKey() == Event.SENSOR_REGISTRY_CHANGED ||
+            entry.getKey() == Event.SENSOR_REGISTRY_STATUS)
+        .map(data -> {
+
+          // DEBUG
+          // System.out.println("D E B U G: It passed through the filter");
+
+          return Map.entry(data.getKey(), SensorRegistry.fromJson(data.getValue()));
+        });
+
+    // Builds a new HashMap //
+    SupplierEx<? extends HashMap<String, Set<String>>> hashMapSupplier =
+        () -> new HashMap<String, Set<String>>();
+
+    // FlatMapFunction //
+    BiFunctionEx<? super HashMap<String, Set<String>>, ? super Entry<Event, SensorRegistry>, ? extends Traverser<Entry<String, Set<String>>>> flatMapFn =
+        (flatMapStage, eventItem) -> {
+          // Get Data
+          HashMap<String, Set<String>> oldParents =
+              (HashMap<String, Set<String>>) flatMapStage.clone();
+          SensorRegistry newSensorRegistry = (SensorRegistry) eventItem.getValue();
+
+          // Transform new Input
+          ChildParentsTransformer transformer = new ChildParentsTransformer("default-name");
+          Map<String, Set<String>> mapFromRegistry =
+              transformer.constructChildParentsPairs(newSensorRegistry);
+
+          // Compare both tables
+          HashMap<String, Set<String>> updates = new HashMap<String, Set<String>>();
+          for (String key : mapFromRegistry.keySet()) {
+            if (oldParents.containsKey(key)) {
+              if (!mapFromRegistry.get(key).equals(oldParents.get(key))) {
+                updates.put(key, mapFromRegistry.get(key));
+              }
+            } else {
+              updates.put(key, mapFromRegistry.get(key));
+            }
+          }
+
+          ArrayList<Entry<String, Set<String>>> updatesList =
+              new ArrayList<Entry<String, Set<String>>>(updates.entrySet());
+
+          /*
+           * DEBUG PRINT System.out.println("DEBUG FLATMAP ARRAY LIST"); for (Entry<String,
+           * Set<String>> entry : updatesList) { String debugString = "["; for (String group :
+           * entry.getValue()) { debugString = debugString + group + ","; } debugString =
+           * debugString + "]"; System.out.println("Entry discovered || Key: " + entry.getKey() +
+           * "; Groups: " + debugString); }
+           */
+
+          // Return traverser with differences
+          return Traversers.traverseIterable(updatesList)
+              .map(e -> Util.entry(e.getKey(), e.getValue()));
+
+        };
+
+    // Write into table sink
+    configurationStream
+        .flatMapStateful(hashMapSupplier, flatMapFn)
+        .writeTo(Sinks.mapWithUpdating(
+            "SensorParentMap", // The addressed IMAP
+            event -> event.getKey(), // The key to look for
+            (oldValue, newEntry) -> { // the new entry returned (null automatically results in
+                                      // deletion of entry)
+
+              // DEBUG
+              /*
+               * String debugFlatmapString = "["; for (String group : newEntry.getValue()) {
+               * debugFlatmapString = debugFlatmapString + group + ","; } debugFlatmapString =
+               * debugFlatmapString + "]"; System.out.println( "Flatmap Writes for key '" +
+               * newEntry.getKey() + "': " + debugFlatmapString);
+               */
+
+              // Write new set of groups
+              return newEntry.getValue();
+            }));
+
+    //////////////////////////////////
+    // (1) Sensor Input Stream
+    StreamStage<Entry<String, Double>> inputStream = uc4Pipeline
+        .readFrom(KafkaSources.<String, ActivePowerRecord>kafka(
+            kafkaInputReadPropsForPipeline, kafkaInputTopic))
+        .withNativeTimestamps(0)
+        .map(stream -> {
+
+          String sensorId = stream.getValue().getIdentifier();
+          Double valueInW = stream.getValue().getValueInW();
+
+          // DEBUG
+          System.out.println("INPUT D E B U G: Got an input Stream Element!");
+          System.out.println("[SensorId=" + sensorId + "//valueinW=" + valueInW.toString());
+
+          return Map.entry(sensorId, valueInW);
+        });
+
+    // (1) Aggregation Stream
+    StreamStage<Entry<String, Double>> aggregations = uc4Pipeline
+        .readFrom(KafkaSources.<String, Double>kafka(
+            kafkaAggregationReadPropsForPipeline, kafkaFeedbackTopic))
+        .withNativeTimestamps(0)
+        .map(stream -> {
+          
+          // DEBUG
+          System.out.println("AGGREGATION D E B U G: Got an aggregation Stream Element!");
+          System.out.println("[SensorId=" + stream.getKey() + "//valueinW=" + stream.getValue().toString());
+          
+          return stream;
+          
+        });
+        
+    // (2) UC4 Merge Input with aggregation stream
+    StreamStageWithKey<Entry<String, Double>, String> mergedInputAndAggregations = inputStream
+        .merge(aggregations)
+        .groupingKey(event -> event.getKey());
+
+    // (3) UC4 Join Configuration and Merges Input/Aggregation Stream
+    // [sensorKey , (value,Set<Groups>)]
+    StreamStage<Entry<String, ValueGroup>> joinedStage = mergedInputAndAggregations
+        .mapUsingIMap(
+            "SensorParentMap",
+            (sensorEvent, sensorParentsSet) -> {
+
+              // Get Data
+              Set<String> sensorParentsCasted = (Set<String>) sensorParentsSet;
+
+              if (sensorParentsCasted != null) {
+                ValueGroup valueParentsPair =
+                    new ValueGroup(sensorEvent.getValue(), sensorParentsCasted);
+                // Return solution
+                return Map.entry(sensorEvent.getKey(), valueParentsPair);
+              } else {
+                Set<String> nullSet = new HashSet<String>();
+                nullSet.add("NULL-GROUPSET");
+                return Map.entry(sensorEvent.getKey(),
+                    new ValueGroup(sensorEvent.getValue(), nullSet));
+              }
+
+
+            });
+
+    // (4) UC4 Duplicate as flatmap joined Stream
+    // [(sensorKey, Group) , value]
+    StreamStage<Entry<SensorGroupKey, Double>> dupliAsFlatmappedStage = joinedStage
+        .flatMap(entry -> {
+
+          // DEBUG
+          // System.out.println("D E B G U G Stage 4");
+
+          // Supplied data
+          String keyGroupId = entry.getKey();
+          Double valueInW = entry.getValue().getValueInW();
+          Set<String> groups = entry.getValue().getGroups();
+
+          // Transformed Data
+          String[] groupList = groups.toArray(String[]::new);
+          SensorGroupKey[] newKeyList = new SensorGroupKey[groupList.length];
+          ArrayList<Entry<SensorGroupKey, Double>> newEntryList =
+              new ArrayList<Entry<SensorGroupKey, Double>>();
+          for (int i = 0; i < groupList.length; i++) {
+            newKeyList[i] = new SensorGroupKey(keyGroupId, groupList[i]);
+            newEntryList.add(Map.entry(newKeyList[i], valueInW));
+            // DEBUG
+            // System.out.println("Added new Entry to list: [(" + newKeyList[i].getSensorId() + ","
+            // + newKeyList[i].getGroup() + ")," + valueInW.toString());
+          }
+
+
+
+          // Return traversable list of new entry elements
+          return Traversers.traverseIterable(newEntryList);
+
+        });
+
+    // (5) UC4 Last Value Map
+    // Table with tumbling window differentiation [ (sensorKey,Group) , value ],Time
+    // TODO: Implementation of static table to fill values out of the past!
+    StageWithWindow<Entry<SensorGroupKey, Double>> windowedLastValues = dupliAsFlatmappedStage
+        .window(WindowDefinition.tumbling(5000));
+
+    // (6) UC4 GroupBy and aggregate and map
+    // Group using the group out of the sensorGroupKey keys
+    StreamStage<Entry<String, Double>> groupedAggregatedMapped = windowedLastValues
+        .groupingKey(entry -> entry.getKey().getGroup())
+        .aggregate(AggregateOperations.summingDouble(entry -> entry.getValue()))
+        .map(agg -> {
+          String theGroup = agg.getKey();
+          Double summedValueInW = agg.getValue();
+          
+          //System.out.println("DEBUG - We have a grouped Aggregation Stage at the end!");
+          
+          return Util.entry(theGroup, summedValueInW);
+        });
+
+    // (7) Sink - Results back to Kafka
+    groupedAggregatedMapped.writeTo(KafkaSinks.<String, Double>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    // (7) Sink - Results back to Kafka
+    groupedAggregatedMapped.writeTo(KafkaSinks.<String, Double>kafka(
+        kafkaWritePropsForPipeline, kafkaFeedbackTopic));
+
+    // (7) Sink - Write to logger/console for debug puposes
+    groupedAggregatedMapped.writeTo(Sinks.logger());
+
+    // Return the pipeline
+    return uc4Pipeline;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ChildParentsTransformer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ChildParentsTransformer.java
new file mode 100644
index 000000000..f533e5df6
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ChildParentsTransformer.java
@@ -0,0 +1,111 @@
+package theodolite.uc4.application.uc4specifics;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.kstream.Transformer;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.KeyValueStore;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.sensorregistry.AggregatedSensor;
+import titan.ccp.model.sensorregistry.Sensor;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
/**
 * Transforms a {@link SensorRegistry} into key value pairs of Sensor identifiers and their parents'
 * sensor identifiers. All pairs whose sensor's parents have changed since last iteration are
 * forwarded. A mapping of an identifier to <code>null</code> means that the corresponding sensor
 * no longer exists in the sensor registry.
 */
public class ChildParentsTransformer implements
    Transformer<Event, SensorRegistry, Iterable<KeyValue<String, Optional<Set<String>>>>> {

  // Name of the Kafka Streams state store holding the last known child->parents mapping.
  private final String stateStoreName;
  // private ProcessorContext context;
  private KeyValueStore<String, Set<String>> state;

  public ChildParentsTransformer(final String stateStoreName) {
    this.stateStoreName = stateStoreName;
  }

  @Override
  @SuppressWarnings("unchecked")
  public void init(final ProcessorContext context) {
    // this.context = context;
    this.state = (KeyValueStore<String, Set<String>>) context.getStateStore(this.stateStoreName);
  }

  /**
   * Computes child->parents pairs for the given registry, reduces them to the pairs that changed
   * since the previous invocation (a null value marks a deleted sensor), updates the state store
   * and returns the differences.
   */
  @Override
  public Iterable<KeyValue<String, Optional<Set<String>>>> transform(final Event event,
      final SensorRegistry registry) {

    // Values may later be null for deleting a sensor
    final Map<String, Set<String>> childParentsPairs = this.constructChildParentsPairs(registry);

    this.updateChildParentsPairs(childParentsPairs);

    this.updateState(childParentsPairs);

    return childParentsPairs
        .entrySet()
        .stream()
        .map(e -> KeyValue.pair(e.getKey(), Optional.ofNullable(e.getValue())))
        .collect(Collectors.toList());
  }

  @Override
  public void close() {
    // Do nothing
  }

  /**
   * Flattens the registry into a map from each sensor identifier to the set of its parent
   * identifiers (empty set for sensors without a parent).
   */
  public Map<String, Set<String>> constructChildParentsPairs(final SensorRegistry registry) {
    return this.streamAllChildren(registry.getTopLevelSensor())
        .collect(Collectors.toMap(
            child -> child.getIdentifier(),
            child -> child.getParent()
                .map(p -> Set.of(p.getIdentifier()))
                .orElseGet(() -> Set.of())));
  }

  // Depth-first stream over all descendants of the given aggregated sensor.
  private Stream<Sensor> streamAllChildren(final AggregatedSensor sensor) {
    return sensor.getChildren().stream()
        .flatMap(s -> Stream.concat(
            Stream.of(s),
            s instanceof AggregatedSensor ? this.streamAllChildren((AggregatedSensor) s)
                : Stream.empty()));
  }

  // Mutates childParentsPairs in place: drops unchanged pairs and maps deleted sensors to null.
  private void updateChildParentsPairs(final Map<String, Set<String>> childParentsPairs) {
    final KeyValueIterator<String, Set<String>> oldChildParentsPairs = this.state.all();
    while (oldChildParentsPairs.hasNext()) {
      final KeyValue<String, Set<String>> oldChildParentPair = oldChildParentsPairs.next();
      final String identifier = oldChildParentPair.key;
      final Set<String> oldParents = oldChildParentPair.value;
      final Set<String> newParents = childParentsPairs.get(identifier); // null if not exists
      if (newParents == null) {
        // Sensor was deleted
        childParentsPairs.put(identifier, null);
      } else if (newParents.equals(oldParents)) {
        // No changes
        childParentsPairs.remove(identifier);
      }
      // Else: Later Perhaps: Mark changed parents
    }
    oldChildParentsPairs.close();
  }

  // Persists the diff into the state store (null value = delete the entry).
  private void updateState(final Map<String, Set<String>> childParentsPairs) {
    for (final Map.Entry<String, Set<String>> childParentPair : childParentsPairs.entrySet()) {
      if (childParentPair.getValue() == null) {
        this.state.delete(childParentPair.getKey());
      } else {
        this.state.put(childParentPair.getKey(), childParentPair.getValue());
      }
    }
  }

}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/EventDeserializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/EventDeserializer.java
new file mode 100644
index 000000000..8ff81d1d6
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/EventDeserializer.java
@@ -0,0 +1,33 @@
+package theodolite.uc4.application.uc4specifics;
+
+import java.util.Map;
+import org.apache.kafka.common.serialization.ByteBufferDeserializer;
+import org.apache.kafka.common.serialization.Deserializer;
+import titan.ccp.configuration.events.Event;
+
+public class EventDeserializer implements Deserializer<Event> {
+
+  private final ByteBufferDeserializer byteBufferDeserializer = new ByteBufferDeserializer();
+
+  @Override
+  public void configure(final Map<String, ?> configs, final boolean isKey) {
+    this.byteBufferDeserializer.configure(configs, isKey);
+  }
+
+  @Override
+  public Event deserialize(final String topic, final byte[] data) {
+    final int ordinal = this.byteBufferDeserializer.deserialize(topic, data).getInt();
+    for (final Event event : Event.values()) {
+      if (ordinal == event.ordinal()) {
+        return event;
+      }
+    }
+    throw new IllegalArgumentException("Deserialized data is not a valid event.");
+  }
+
+  @Override
+  public void close() {
+    this.byteBufferDeserializer.close();
+  }
+  
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/HashMapSupplier.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/HashMapSupplier.java
new file mode 100644
index 000000000..171c4d09b
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/HashMapSupplier.java
@@ -0,0 +1,23 @@
+package theodolite.uc4.application.uc4specifics;
+
+import com.hazelcast.function.SupplierEx;
+import java.util.HashMap;
+import java.util.Set;
+
+public class HashMapSupplier implements SupplierEx<HashMap<String,Set<String>>>{
+  
+  private static final long serialVersionUID = -6247504592403610702L;
+
+  @Override
+  public HashMap<String, Set<String>> get() {
+    return new HashMap<String, Set<String>>();
+  }
+
+  @Override
+  public HashMap<String, Set<String>> getEx() throws Exception {
+    return null;
+  }
+
+  
+  
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKey.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKey.java
new file mode 100644
index 000000000..43dae021f
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKey.java
@@ -0,0 +1,50 @@
+package theodolite.uc4.application.uc4specifics;
+
+import java.util.Objects;
+
/**
 * Immutable composite key pairing a sensor identifier with one of its groups:
 * (sensorId, group).
 */
public class SensorGroupKey {

  private final String sensorId;
  private final String group;

  public SensorGroupKey(final String sensorId, final String group) {
    this.sensorId = sensorId;
    this.group = group;
  }

  public String getSensorId() {
    return this.sensorId;
  }

  public String getGroup() {
    return this.group;
  }

  @Override
  public String toString() {
    return "[SensorId: " + this.sensorId + "; Group: " + this.group + "]";
  }

  @Override
  public int hashCode() {
    return Objects.hash(this.sensorId, this.group);
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof SensorGroupKey)) {
      return false;
    }
    final SensorGroupKey that = (SensorGroupKey) obj;
    return Objects.equals(this.sensorId, that.sensorId)
        && Objects.equals(this.group, that.group);
  }

}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKeySerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKeySerializer.java
new file mode 100644
index 000000000..d45ec67e0
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/SensorGroupKeySerializer.java
@@ -0,0 +1,30 @@
+package theodolite.uc4.application.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+
+public class SensorGroupKeySerializer implements StreamSerializer<SensorGroupKey>{
+  
+private static final int TYPE_ID = 2;
+  
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final SensorGroupKey key) throws IOException {
+    out.writeString(key.getSensorId()); 
+    out.writeString(key.getGroup()); 
+  }
+
+  @Override
+  public SensorGroupKey read(final ObjectDataInput in) throws IOException {
+    return new SensorGroupKey(in.readString(), in.readString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroup.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroup.java
new file mode 100644
index 000000000..e0ee77b2c
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroup.java
@@ -0,0 +1,54 @@
+package theodolite.uc4.application.uc4specifics;
+
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * Structure: (valueInW, Set(Groups))
+ */
+public class ValueGroup {
+
+  private final Double valueInW;
+  private final Set<String> groups;
+  
+  public ValueGroup(Double valueInW, Set<String> groups) {
+    this.valueInW = valueInW;
+    this.groups = groups;
+  }
+  
+  public Double getValueInW() {
+    return this.valueInW;
+  }
+  
+  public Set<String> getGroups() {
+    return this.groups;
+  }
+  
+  @Override
+  public String toString() {
+    String groupString = "[";
+    for (String group: groups) {
+      groupString = groupString + group + "/";
+    }
+    return this.valueInW.toString() + ";" + groupString + "]";
+  }
+  
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.valueInW, this.groups);
+  }
+  
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof ValueGroup) {
+      final ValueGroup other = (ValueGroup) obj;
+      return Objects.equals(this.valueInW, other.valueInW)
+          && this.groups.containsAll(other.groups);
+    }
+    return false;
+  }
+  
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroupSerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroupSerializer.java
new file mode 100644
index 000000000..fa72b3d7f
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/theodolite/uc4/application/uc4specifics/ValueGroupSerializer.java
@@ -0,0 +1,32 @@
+package theodolite.uc4.application.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+
+/** A pipeline serializer for the HourOfDayKey to allow for parallelization. */
+public class ValueGroupSerializer implements StreamSerializer<ValueGroup> {
+
+  private static final int TYPE_ID = 1;
+  
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final ValueGroup key) throws IOException {
+    out.writeDouble(key.getValueInW()); 
+    out.writeString(String.join(",", key.getGroups())); 
+  }
+
+  @Override
+  public ValueGroup read(final ObjectDataInput in) throws IOException {
+    return new ValueGroup(in.readDouble(), new HashSet<>(Arrays.asList(in.readString().split(","))));
+  }
+
+}
+
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 000000000..e3371cc87
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc4-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
-- 
GitLab