diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0b8a87bb35ddfabf966e59b9e43af639e0b7fe41..37826729b815d0390d0e64d8c0409a2f9fa90f13 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -573,6 +573,16 @@ smoketest-uc3-kstreams:
     DOCKER_COMPOSE_DIR: "uc3-kstreams"
     JAVA_PROJECT_DEPS: "uc3-kstreams,kstreams-commons,uc3-load-generator,load-generator-commons"
 
+smoketest-uc3-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-flink
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-flink"
+    JAVA_PROJECT_DEPS: "uc3-flink,flink-commons,uc3-load-generator,load-generator-commons"
+
+
 smoketest-uc3-beam-flink:
   extends: .smoketest-benchmarks
   needs:
diff --git a/docs/api-reference/patchers.md b/docs/api-reference/patchers.md
index bea63ccd23decef5654f257221ce0358b4f68e45..b0c99437fd003072b22b10df884ece88e3234c52 100644
--- a/docs/api-reference/patchers.md
+++ b/docs/api-reference/patchers.md
@@ -27,6 +27,18 @@ Patchers can be seen as functions which take a value as input and modify a Kuber
   * **properties**:
     * loadGenMaxRecords: 150000
 
+* **DataVolumeLoadGeneratorReplicaPatcher**: Takes the total load that should be generated and computes the number of instances needed for this load based on the `maxVolume` (instances = (load + maxVolume - 1) / maxVolume) as well as the load per instance (loadPerInstance = load / instances). The number of instances is set for the load generator, and the given variable is set to the load per instance.
+  * **type**: "DataVolumeLoadGeneratorReplicaPatcher"
+  * **resource**: "osp-load-generator-deployment.yaml"
+  * **properties**:
+    * maxVolume: "50"
+    * container: "workload-generator"
+    * variableName: "DATA_VOLUME"
+
+* **ReplicaPatcher**: Allows modifying the number of replicas of a Kubernetes Deployment.
+  * **type**: "ReplicaPatcher"
+  * **resource**: "uc1-kstreams-deployment.yaml"
+
 * **EnvVarPatcher**: Modifies the value of an environment variable for a container in a Kubernetes deployment. 
   * **type**: "EnvVarPatcher"
   * **resource**: "uc1-load-generator-deployment.yaml"
@@ -34,6 +46,14 @@ Patchers can be seen as functions which take a value as input and modify a Kuber
     * container: "workload-generator"
     * variableName: "NUM_SENSORS"
 
+* **ConfigMapYamlPatcher**: Allows adding or modifying a key-value pair in a YAML file of a ConfigMap.
+  * **type**: "ConfigMapYamlPatcher"
+  * **resource**: "flink-configuration-configmap.yaml"
+  * **properties**:
+    * fileName: "flink-conf.yaml"
+    * variableName: "jobmanager.memory.process.size"
+  * **value**: "4Gb"
+
 * **NodeSelectorPatcher**: Changes the node selection field in Kubernetes resources.
   * **type**: "NodeSelectorPatcher"
   * **resource**: "uc1-load-generator-deployment.yaml"
diff --git a/docs/project-info.md b/docs/project-info.md
index 36019019dcaf486fb0befb06b3550fafd41f8855..5cca3c92ffb70df98f19d6275953de51b7216116 100644
--- a/docs/project-info.md
+++ b/docs/project-info.md
@@ -26,6 +26,7 @@ Theodolite's internal development including issue boards, merge requests and ext
 * [Lorenz Boguhn](https://github.com/lorenzboguhn)
 * [Simon Ehrenstein](https://github.com/sehrenstein)
 * [Willi Hasselbring](https://www.se.informatik.uni-kiel.de/en/team/prof.-dr.-wilhelm-willi-hasselbring)
+* [Luca Mertens](https://www.linkedin.com/in/luca-mertens-35a932201)
 * [Tobias Pfandzelter](https://pfandzelter.com/)
 * [Julia Rossow](https://www.linkedin.com/in/julia-rossow/)
 * [Björn Vonheiden](https://github.com/bvonheid)
diff --git a/helm/README.md b/helm/README.md
index 172ff98b7e06d337a8ff634b25b0c7b69218dd78..754146c514046764d58435e06237dca834d8803b 100644
--- a/helm/README.md
+++ b/helm/README.md
@@ -67,10 +67,11 @@ kubectl delete crd thanosrulers.monitoring.coreos.com
 
 The following 3rd party charts are used by Theodolite:
 
-- Kube Prometheus Stack (to install the Prometheus Operator, which is used to create a Prometheus instances)
-- Grafana (including a dashboard and a data source configuration)
-- Confluent Platform (for Kafka and Zookeeper)
-- Kafka Lag Exporter (used to collect monitoring data of the Kafka lag)
+- Kube Prometheus Stack
+  - to install the Prometheus Operator, which is used to create Prometheus instances
+  - to deploy Grafana (including a dashboard and a data source configuration)
+- Grafana (deprecated, as it has been replaced by the Kube Prometheus Stack)
+- Strimzi (for managing Kafka and Zookeeper)
 
 ### Hints
 
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/ConfigurationKeys.java b/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/ConfigurationKeys.java
index c22c164f62ad22d3c18add75ad5115fd15fb8f14..51105fcec36fcfb28b810a8ee2f37ed856908d76 100644
--- a/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/ConfigurationKeys.java
+++ b/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/ConfigurationKeys.java
@@ -4,7 +4,7 @@ package rocks.theodolite.benchmarks.commons.beam;
  * Keys to access configuration parameters.
  */
 public final class ConfigurationKeys {
-  // Common keys
+
   public static final String APPLICATION_NAME = "application.name";
 
   public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
@@ -13,26 +13,6 @@ public final class ConfigurationKeys {
 
   public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
 
-  // Additional topics
-  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
-
-  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
-
-  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
-
-  // UC2
-  public static final String KAFKA_WINDOW_DURATION_MINUTES = "kafka.window.duration.minutes";
-
-  // UC3
-  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
-
-  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
-
-  // UC4
-  public static final String GRACE_PERIOD_MS = "grace.period.ms";
-
-
-  // BEAM
   public static final String ENABLE_AUTO_COMMIT = "enable.auto.commit";
 
   public static final String MAX_POLL_RECORDS = "max.poll.records";
@@ -41,9 +21,6 @@ public final class ConfigurationKeys {
 
   public static final String SPECIFIC_AVRO_READER = "specific.avro.reader";
 
-  public static final String TRIGGER_INTERVAL = "trigger.interval";
-
-
   private ConfigurationKeys() {}
 
 }
diff --git a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc1-beam-flink/resources/flink-configuration-configmap.yaml
deleted file mode 100644
index 36178e2bebdac96b8648bd6c299009aa49d3fff6..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/flink-configuration-configmap.yaml
+++ /dev/null
@@ -1,66 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: flink-config
-  labels:
-    app: flink
-data:
-  flink-conf.yaml: |+
-    jobmanager.rpc.address: flink-jobmanager
-    taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
-    metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
-    metrics.reporter.prom.interval: 10 SECONDS
-    taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
-  log4j-console.properties: |+
-    # This affects logging for both user code and Flink
-    rootLogger.level = INFO
-    rootLogger.appenderRef.console.ref = ConsoleAppender
-    rootLogger.appenderRef.rolling.ref = RollingFileAppender
-
-    # Uncomment this if you want to _only_ change Flink's logging
-    #logger.flink.name = org.apache.flink
-    #logger.flink.level = INFO
-
-    # The following lines keep the log level of common libraries/connectors on
-    # log level INFO. The root logger does not override this. You have to manually
-    # change the log levels here.
-    logger.akka.name = akka
-    logger.akka.level = INFO
-    logger.kafka.name= org.apache.kafka
-    logger.kafka.level = INFO
-    logger.hadoop.name = org.apache.hadoop
-    logger.hadoop.level = INFO
-    logger.zookeeper.name = org.apache.zookeeper
-    logger.zookeeper.level = INFO
-
-    # Log all infos to the console
-    appender.console.name = ConsoleAppender
-    appender.console.type = CONSOLE
-    appender.console.layout.type = PatternLayout
-    appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-
-    # Log all infos in the given rolling file
-    appender.rolling.name = RollingFileAppender
-    appender.rolling.type = RollingFile
-    appender.rolling.append = false
-    appender.rolling.fileName = ${sys:log.file}
-    appender.rolling.filePattern = ${sys:log.file}.%i
-    appender.rolling.layout.type = PatternLayout
-    appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-    appender.rolling.policies.type = Policies
-    appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
-    appender.rolling.policies.size.size=100MB
-    appender.rolling.strategy.type = DefaultRolloverStrategy
-    appender.rolling.strategy.max = 10
-
-    # Suppress the irrelevant (wrong) warnings from the Netty channel handler
-    logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-beam-flink/resources/jobmanager-deployment.yaml
index d274cba8a326ddaac7670e7797b899d6022b12c9..c9b48c83a995e4d7a12dcbcded223786d803835e 100644
--- a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-beam-flink/resources/jobmanager-deployment.yaml
@@ -20,20 +20,22 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
           args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc1.beam.flink.Uc1BeamFlink",
                   "--parallelism=$(PARALLELISM)",
-                  "--disableMetrics=true",
-                  "--fasterCopy"]
+                  "--disableMetrics=$(DISABLE_METRICS)",
+                  "--fasterCopy=$(FASTER_COPY)"]
           # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
+            - name: DISABLE_METRICS
+              value: "true"
+            - name: FASTER_COPY
+              value: "true"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
@@ -65,33 +67,5 @@ spec:
               port: 6123
             initialDelaySeconds: 30
             periodSeconds: 60
-          # volumeMounts:
-          #   - name: flink-config-volume-rw
-          #     mountPath: /opt/flink/conf
-#            - name: job-artifacts-volume
-#              mountPath: /opt/flink/usrlib
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      # initContainers:
-      #   - name: init-jobmanager
-      #     image: busybox:1.28
-      #     command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-      #     volumeMounts:
-      #       - name: flink-config-volume
-      #         mountPath: /flink-config/
-      #       - name: flink-config-volume-rw
-      #         mountPath: /flink-config-rw/
-      # volumes:
-      #   - name: flink-config-volume
-      #     configMap:
-      #       name: flink-config
-      #       items:
-      #         - key: flink-conf.yaml
-      #           path: flink-conf.yaml
-      #         - key: log4j-console.properties
-      #           path: log4j-console.properties
-      #   - name: flink-config-volume-rw
-      #     emptyDir: {}
-#        - name: job-artifacts-volume
-#          hostPath:
-#            path: /host/path/to/job/artifacts
diff --git a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-beam-flink/resources/taskmanager-deployment.yaml
index bcf3a04f34dd039a3d75c86df095d09b9523f615..9e6e17ea26802d026255aefc6a64a0ea60a632f5 100644
--- a/theodolite-benchmarks/definitions/uc1-beam-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-beam-flink/resources/taskmanager-deployment.yaml
@@ -20,31 +20,8 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
           args: ["taskmanager"]
           env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            # - name: PARALLELISM
-            #   value: "1"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
               value: "flink-jobmanager"
-            # - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-            #   value: "1" #TODO
-            # - name: FLINK_PROPERTIES
-            #   value: |+
-            #     blob.server.port: 6124
-            #     jobmanager.rpc.port: 6123
-            #     taskmanager.rpc.port: 6122
-            #     queryable-state.proxy.ports: 6125
-            #     jobmanager.memory.process.size: 4Gb
-            #     taskmanager.memory.process.size: 4Gb
-            #     #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -56,33 +33,5 @@ spec:
               name: query-state
             - containerPort: 9249
               name: metrics
-          # livenessProbe:
-          #   tcpSocket:
-          #     port: 6122
-          #   initialDelaySeconds: 30
-          #   periodSeconds: 60
-          # volumeMounts:
-          #   - name: flink-config-volume-rw
-          #     mountPath: /opt/flink/conf/
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      # initContainers:
-      #   - name: init-taskmanager
-      #     image: busybox:1.28
-      #     command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-      #     volumeMounts:
-      #       - name: flink-config-volume
-      #         mountPath: /flink-config/
-      #       - name: flink-config-volume-rw
-      #         mountPath: /flink-config-rw/
-      # volumes:
-      #   - name: flink-config-volume
-      #     configMap:
-      #       name: flink-config
-      #       items:
-      #         - key: flink-conf.yaml
-      #           path: flink-conf.yaml
-      #         - key: log4j-console.properties
-      #           path: log4j-console.properties
-      #   - name: flink-config-volume-rw
-      #     emptyDir: {}
diff --git a/theodolite-benchmarks/definitions/uc1-beam-flink/uc1-beam-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-beam-flink/uc1-beam-flink-benchmark-operator.yaml
index 0cf92309febc2df08d0f5b7b2c733397f778b60c..328a51745abeba130c45e29fa0c62060f852fbcd 100644
--- a/theodolite-benchmarks/definitions/uc1-beam-flink/uc1-beam-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc1-beam-flink/uc1-beam-flink-benchmark-operator.yaml
@@ -2,19 +2,22 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc1-beam-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc1
+    sut: beam-flink
 spec:
   sut:
     resources:
       - configMap:
           name: "benchmark-resources-uc1-beam-flink"
           files:
-          - "flink-configuration-configmap.yaml"
           - "taskmanager-deployment.yaml"
           - "taskmanager-service.yaml"
           - "service-monitor.yaml"
           - "jobmanager-service.yaml"
           - "jobmanager-deployment.yaml"
-          #- "jobmanager-rest-service.yaml"
+          # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +35,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
@@ -64,4 +62,4 @@ spec:
     topics:
       - name: "input"
         numPartitions: 40
-        replicationFactor: 1
\ No newline at end of file
+        replicationFactor: 1
diff --git a/theodolite-benchmarks/definitions/uc1-beam-samza/resources/uc1-beam-samza-deployment.yaml b/theodolite-benchmarks/definitions/uc1-beam-samza/resources/uc1-beam-samza-deployment.yaml
index d7f453e2ab9c15bca83162520ebe7e90ce7be3ea..d10840efeda00b43d192fd8e3c1a6c6fe1c27bc6 100644
--- a/theodolite-benchmarks/definitions/uc1-beam-samza/resources/uc1-beam-samza-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-beam-samza/resources/uc1-beam-samza-deployment.yaml
@@ -25,14 +25,14 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: MAX_SOURCE_PARALLELISM
               value: "1024"
+            - name: ENABLE_METRICS
+              value: "false"
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
             # - name: JAVA_OPTS
             #   value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc1-beam-samza/uc1-beam-samza-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-beam-samza/uc1-beam-samza-benchmark-operator.yaml
index 95b34a2a9bfc8b06bf01cc5b9eed30bb16fe71ca..615511caf2840c5b43b149d0c82daed0e62e111d 100644
--- a/theodolite-benchmarks/definitions/uc1-beam-samza/uc1-beam-samza-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc1-beam-samza/uc1-beam-samza-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc1-beam-samza
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc1
+    sut: beam-samza
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc1-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc1-flink/resources/flink-configuration-configmap.yaml
index 36178e2bebdac96b8648bd6c299009aa49d3fff6..ecda4025087f103613b9700ca733962709c48836 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/resources/flink-configuration-configmap.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/resources/flink-configuration-configmap.yaml
@@ -7,18 +7,16 @@ metadata:
 data:
   flink-conf.yaml: |+
     jobmanager.rpc.address: flink-jobmanager
-    taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
+    blob.server.port: 6124
+    jobmanager.rpc.port: 6123
+    taskmanager.rpc.port: 6122
+    queryable-state.proxy.ports: 6125
+    jobmanager.memory.process.size: 4Gb
+    taskmanager.memory.process.size: 4Gb
     metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
     metrics.reporter.prom.interval: 10 SECONDS
+    # gives metrics about inbound/outbound network queue lengths
     taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
   log4j-console.properties: |+
     # This affects logging for both user code and Flink
     rootLogger.level = INFO
@@ -63,4 +61,4 @@ data:
 
     # Suppress the irrelevant (wrong) warnings from the Netty channel handler
     logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
+    logger.netty.level = OFF
diff --git a/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
index 74c5002dbdf988071ee43946b55b56d7d439a09e..e6112c476602cad9f6805a118f253f2800a945f1 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/resources/jobmanager-deployment.yaml
@@ -23,25 +23,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -63,21 +50,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf
-#            - name: job-artifacts-volume
-#              mountPath: /opt/flink/usrlib
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-jobmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -87,8 +63,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
-#        - name: job-artifacts-volume
-#          hostPath:
-#            path: /host/path/to/job/artifacts
diff --git a/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
index be23c1d6abf2ccdc39481997bff12f11fd8fd580..c945b036d26161c849a51246bcdae0e0f15398ff 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/resources/taskmanager-deployment.yaml
@@ -18,32 +18,6 @@ spec:
       containers:
         - name: taskmanager
           image: ghcr.io/cau-se/theodolite-uc1-flink:latest
-          env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: PARALLELISM
-              value: "1"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-              value: "1" #TODO
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -62,19 +36,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf/
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-taskmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -84,5 +49,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
diff --git a/theodolite-benchmarks/definitions/uc1-flink/uc1-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-flink/uc1-flink-benchmark-operator.yaml
index 0efc06906a41b60437c35af67bd28b0e06503f79..b7d33a11d597db3f6371353f8783dc0a7d2252fe 100644
--- a/theodolite-benchmarks/definitions/uc1-flink/uc1-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc1-flink/uc1-flink-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc1-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc1
+    sut: flink
 spec:
   sut:
     resources:
@@ -32,11 +36,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
@@ -64,4 +63,4 @@ spec:
     topics:
       - name: "input"
         numPartitions: 40
-        replicationFactor: 1
\ No newline at end of file
+        replicationFactor: 1
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
index 28cf4761d44a4d98c1d2b889f9c6a679ec96304a..636fde2588f1a454471d1b491c83895266c0e5b9 100644
--- a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
@@ -21,8 +21,6 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
             - name: KUBERNETES_NAMESPACE
               valueFrom:
                 fieldRef:
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-benchmark-operator.yaml
index f1754bd3a9b856ae95a5b2b149833833ce4c196f..9835950efe1c670c5518b6b11c17e9fcc1125ca5 100644
--- a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-hazelcastjet-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc1-hazelcastjet
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc1
+    sut: hazelcastjet
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc1-kstreams/resources/uc1-kstreams-deployment.yaml b/theodolite-benchmarks/definitions/uc1-kstreams/resources/uc1-kstreams-deployment.yaml
index 7698dc50306901d8fd7e2f4ab79f2f362b909d54..37d9004bba2f6ca3cbda370e9d38977aea014f1a 100644
--- a/theodolite-benchmarks/definitions/uc1-kstreams/resources/uc1-kstreams-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc1-kstreams/resources/uc1-kstreams-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://theodolite-kafka-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc1-kstreams/uc1-kstreams-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-kstreams/uc1-kstreams-benchmark-operator.yaml
index 6f8f39b396c24a174e98a58d522b00fd010ed2d8..2d03fdaeb216bdaac60b1d8d50646cae296e4ea9 100644
--- a/theodolite-benchmarks/definitions/uc1-kstreams/uc1-kstreams-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc1-kstreams/uc1-kstreams-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc1-kstreams
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc1
+    sut: kstreams
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc2-beam-flink/resources/flink-configuration-configmap.yaml
deleted file mode 100644
index 36178e2bebdac96b8648bd6c299009aa49d3fff6..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/flink-configuration-configmap.yaml
+++ /dev/null
@@ -1,66 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: flink-config
-  labels:
-    app: flink
-data:
-  flink-conf.yaml: |+
-    jobmanager.rpc.address: flink-jobmanager
-    taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
-    metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
-    metrics.reporter.prom.interval: 10 SECONDS
-    taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
-  log4j-console.properties: |+
-    # This affects logging for both user code and Flink
-    rootLogger.level = INFO
-    rootLogger.appenderRef.console.ref = ConsoleAppender
-    rootLogger.appenderRef.rolling.ref = RollingFileAppender
-
-    # Uncomment this if you want to _only_ change Flink's logging
-    #logger.flink.name = org.apache.flink
-    #logger.flink.level = INFO
-
-    # The following lines keep the log level of common libraries/connectors on
-    # log level INFO. The root logger does not override this. You have to manually
-    # change the log levels here.
-    logger.akka.name = akka
-    logger.akka.level = INFO
-    logger.kafka.name= org.apache.kafka
-    logger.kafka.level = INFO
-    logger.hadoop.name = org.apache.hadoop
-    logger.hadoop.level = INFO
-    logger.zookeeper.name = org.apache.zookeeper
-    logger.zookeeper.level = INFO
-
-    # Log all infos to the console
-    appender.console.name = ConsoleAppender
-    appender.console.type = CONSOLE
-    appender.console.layout.type = PatternLayout
-    appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-
-    # Log all infos in the given rolling file
-    appender.rolling.name = RollingFileAppender
-    appender.rolling.type = RollingFile
-    appender.rolling.append = false
-    appender.rolling.fileName = ${sys:log.file}
-    appender.rolling.filePattern = ${sys:log.file}.%i
-    appender.rolling.layout.type = PatternLayout
-    appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-    appender.rolling.policies.type = Policies
-    appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
-    appender.rolling.policies.size.size=100MB
-    appender.rolling.strategy.type = DefaultRolloverStrategy
-    appender.rolling.strategy.max = 10
-
-    # Suppress the irrelevant (wrong) warnings from the Netty channel handler
-    logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-beam-flink/resources/jobmanager-deployment.yaml
index 667559010b0dc9f642f9cb0e8c9d257d70a2dbaa..27c0a5082ec5e2a5fd0e61800dc6a0429ace8c80 100644
--- a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-beam-flink/resources/jobmanager-deployment.yaml
@@ -20,20 +20,22 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
           args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc2.beam.flink.Uc2BeamFlink",
                   "--parallelism=$(PARALLELISM)",
-                  "--disableMetrics=true",
-                  "--fasterCopy"]
+                  "--disableMetrics=$(DISABLE_METRICS)",
+                  "--fasterCopy=$(FASTER_COPY)"]
           # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
+            - name: DISABLE_METRICS
+              value: "true"
+            - name: FASTER_COPY
+              value: "true"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
diff --git a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-beam-flink/resources/taskmanager-deployment.yaml
index f4f7a9c54a55c42dffe838925a8a97e73ef74759..b52d4198f6dc290d15c7496ba5c0f8b741dc6303 100644
--- a/theodolite-benchmarks/definitions/uc2-beam-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-beam-flink/resources/taskmanager-deployment.yaml
@@ -20,29 +20,8 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
           args: ["taskmanager"]
           env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
               value: "flink-jobmanager"
-            # - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-            #   value: "1" #TODO
-            # - name: FLINK_PROPERTIES
-            #   value: |+
-            #     blob.server.port: 6124
-            #     jobmanager.rpc.port: 6123
-            #     taskmanager.rpc.port: 6122
-            #     queryable-state.proxy.ports: 6125
-            #     jobmanager.memory.process.size: 4Gb
-            #     taskmanager.memory.process.size: 4Gb
-            #     #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -54,11 +33,5 @@ spec:
               name: query-state
             - containerPort: 9249
               name: metrics
-          # livenessProbe:
-          #   tcpSocket:
-          #     port: 6122
-          #   initialDelaySeconds: 30
-          #   periodSeconds: 60
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-
diff --git a/theodolite-benchmarks/definitions/uc2-beam-flink/uc2-beam-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-beam-flink/uc2-beam-flink-benchmark-operator.yaml
index 255c3d0b0cf1c5b61278e3fe888530ba596aaa35..39ddaa38df4a1f1f5c3db45afa14b00ee7eb9087 100644
--- a/theodolite-benchmarks/definitions/uc2-beam-flink/uc2-beam-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc2-beam-flink/uc2-beam-flink-benchmark-operator.yaml
@@ -2,19 +2,22 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc2-beam-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc2
+    sut: beam-flink
 spec:
   sut:
     resources:
       - configMap:
           name: "benchmark-resources-uc2-beam-flink"
           files:
-          - "flink-configuration-configmap.yaml"
           - "taskmanager-deployment.yaml"
           - "taskmanager-service.yaml"
           - "service-monitor.yaml"
           - "jobmanager-service.yaml"
           - "jobmanager-deployment.yaml"
-          #- "jobmanager-rest-service.yaml"
+          # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +35,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
@@ -69,4 +67,4 @@ spec:
         numPartitions: 40
         replicationFactor: 1
       - name: "theodolite-.*"
-        removeOnly: True
\ No newline at end of file
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc2-beam-samza/resources/uc2-beam-samza-deployment.yaml b/theodolite-benchmarks/definitions/uc2-beam-samza/resources/uc2-beam-samza-deployment.yaml
index 4266b47a15f2b43e7d746a581c1cd5b1f7321096..986740cbf63952b076048a026ef086276433b8bc 100644
--- a/theodolite-benchmarks/definitions/uc2-beam-samza/resources/uc2-beam-samza-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-beam-samza/resources/uc2-beam-samza-deployment.yaml
@@ -25,12 +25,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: MAX_SOURCE_PARALLELISM
               value: "1024"
+            - name: ENABLE_METRICS
+              value: "false"
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc2-beam-samza/uc2-beam-samza-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-beam-samza/uc2-beam-samza-benchmark-operator.yaml
index 282ffe406b046d306e3950ab825d84ceac3e5d96..c385df649ae9d333ae6319631a3211cf441b48e5 100644
--- a/theodolite-benchmarks/definitions/uc2-beam-samza/uc2-beam-samza-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc2-beam-samza/uc2-beam-samza-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc2-beam-samza
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc2
+    sut: beam-samza
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc2-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc2-flink/resources/flink-configuration-configmap.yaml
index 321541f6ac8715b8546b964d8ad2b7c28552fbcd..ecda4025087f103613b9700ca733962709c48836 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/resources/flink-configuration-configmap.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/resources/flink-configuration-configmap.yaml
@@ -6,19 +6,17 @@ metadata:
     app: flink
 data:
   flink-conf.yaml: |+
-    #jobmanager.rpc.address: flink-jobmanager
-    #taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
+    jobmanager.rpc.address: flink-jobmanager
+    blob.server.port: 6124
+    jobmanager.rpc.port: 6123
+    taskmanager.rpc.port: 6122
+    queryable-state.proxy.ports: 6125
+    jobmanager.memory.process.size: 4Gb
+    taskmanager.memory.process.size: 4Gb
     metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
     metrics.reporter.prom.interval: 10 SECONDS
+    # gives metrics about inbound/outbound network queue lengths
     taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
   log4j-console.properties: |+
     # This affects logging for both user code and Flink
     rootLogger.level = INFO
@@ -63,4 +61,4 @@ data:
 
     # Suppress the irrelevant (wrong) warnings from the Netty channel handler
     logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
+    logger.netty.level = OFF
diff --git a/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
index d9d7182837dcd29dec80e74d631270b678dc9a7f..512667e756bfe78ca68286b396fb1e867ab5915b 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/resources/jobmanager-deployment.yaml
@@ -23,25 +23,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -63,21 +50,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf
-#            - name: job-artifacts-volume
-#              mountPath: /opt/flink/usrlib
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-jobmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -87,8 +63,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
-#        - name: job-artifacts-volume
-#          hostPath:
-#            path: /host/path/to/job/artifacts
diff --git a/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
index add5ee6c770f8a4fa1cd8685009ae0fba85ac459..66f81b143cbdbb733e5a81c00de0d20aca13af39 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/resources/taskmanager-deployment.yaml
@@ -18,32 +18,6 @@ spec:
       containers:
         - name: taskmanager
           image: ghcr.io/cau-se/theodolite-uc2-flink:latest
-          env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: PARALLELISM
-              value: "1"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-              value: "1" #TODO
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -62,19 +36,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf/
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-taskmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -84,5 +49,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
diff --git a/theodolite-benchmarks/definitions/uc2-flink/uc2-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-flink/uc2-flink-benchmark-operator.yaml
index d94fd0863c2b441413449db89da3c72a26e5dc1a..d1cf9e5200f47a6a3317fe114b31231dbb9c3767 100644
--- a/theodolite-benchmarks/definitions/uc2-flink/uc2-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc2-flink/uc2-flink-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc2-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc2
+    sut: flink
 spec:
   sut:
     resources:
@@ -14,7 +18,7 @@ spec:
             - "service-monitor.yaml"
             - "jobmanager-service.yaml"
             - "jobmanager-deployment.yaml"
-            #- "jobmanager-rest-service.yaml"
+            # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +36,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
index 15eb8baa4219253270066d68cfa9cb4166eafd48..0d9f7372416049afd57bb35056ac39d4d7bdd75e 100644
--- a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
@@ -21,10 +21,6 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
-            - name: DOWNSAMPLE_INTERVAL
-              value: "5000"
             #- name: KUBERNETES_DNS_NAME
             #  value: "titan-ccp-aggregation"
             - name: KUBERNETES_NAMESPACE
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-benchmark-operator.yaml
index f3e18578acebad3d70ce9b93f69513d21727cb38..0e26fc60ee848a3f9b2a63d8e3efdfa10e5c2b0a 100644
--- a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-hazelcastjet-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc2-hazelcastjet
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc2
+    sut: hazelcastjet
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc2-kstreams/resources/uc2-kstreams-deployment.yaml b/theodolite-benchmarks/definitions/uc2-kstreams/resources/uc2-kstreams-deployment.yaml
index 47711343508c712e8f46d38feaef6b6057fbdc80..ccd7a5a6e595c2ec1434bbeb091cb1d50305a339 100644
--- a/theodolite-benchmarks/definitions/uc2-kstreams/resources/uc2-kstreams-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc2-kstreams/resources/uc2-kstreams-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://theodolite-kafka-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc2-kstreams/uc2-kstreams-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-kstreams/uc2-kstreams-benchmark-operator.yaml
index 25acd09d5b75c282dc1a0e6e12764880b89d87b2..34033cd6bddbfbdfa2332a7f56c55340bfda719b 100644
--- a/theodolite-benchmarks/definitions/uc2-kstreams/uc2-kstreams-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc2-kstreams/uc2-kstreams-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc2-kstreams
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc2
+    sut: kstreams
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc3-beam-flink/resources/flink-configuration-configmap.yaml
deleted file mode 100644
index 36178e2bebdac96b8648bd6c299009aa49d3fff6..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/flink-configuration-configmap.yaml
+++ /dev/null
@@ -1,66 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: flink-config
-  labels:
-    app: flink
-data:
-  flink-conf.yaml: |+
-    jobmanager.rpc.address: flink-jobmanager
-    taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
-    metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
-    metrics.reporter.prom.interval: 10 SECONDS
-    taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
-  log4j-console.properties: |+
-    # This affects logging for both user code and Flink
-    rootLogger.level = INFO
-    rootLogger.appenderRef.console.ref = ConsoleAppender
-    rootLogger.appenderRef.rolling.ref = RollingFileAppender
-
-    # Uncomment this if you want to _only_ change Flink's logging
-    #logger.flink.name = org.apache.flink
-    #logger.flink.level = INFO
-
-    # The following lines keep the log level of common libraries/connectors on
-    # log level INFO. The root logger does not override this. You have to manually
-    # change the log levels here.
-    logger.akka.name = akka
-    logger.akka.level = INFO
-    logger.kafka.name= org.apache.kafka
-    logger.kafka.level = INFO
-    logger.hadoop.name = org.apache.hadoop
-    logger.hadoop.level = INFO
-    logger.zookeeper.name = org.apache.zookeeper
-    logger.zookeeper.level = INFO
-
-    # Log all infos to the console
-    appender.console.name = ConsoleAppender
-    appender.console.type = CONSOLE
-    appender.console.layout.type = PatternLayout
-    appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-
-    # Log all infos in the given rolling file
-    appender.rolling.name = RollingFileAppender
-    appender.rolling.type = RollingFile
-    appender.rolling.append = false
-    appender.rolling.fileName = ${sys:log.file}
-    appender.rolling.filePattern = ${sys:log.file}.%i
-    appender.rolling.layout.type = PatternLayout
-    appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-    appender.rolling.policies.type = Policies
-    appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
-    appender.rolling.policies.size.size=100MB
-    appender.rolling.strategy.type = DefaultRolloverStrategy
-    appender.rolling.strategy.max = 10
-
-    # Suppress the irrelevant (wrong) warnings from the Netty channel handler
-    logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-beam-flink/resources/jobmanager-deployment.yaml
index 1e70c714d0feef09e034566e9e1745b3b310753b..d1b7b9b5166b93325e7b5cb4933d9db5627afcd9 100644
--- a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-beam-flink/resources/jobmanager-deployment.yaml
@@ -20,20 +20,22 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
           args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc3.beam.flink.Uc3BeamFlink",
                   "--parallelism=$(PARALLELISM)",
-                  "--disableMetrics=true",
-                  "--fasterCopy"]
+                  "--disableMetrics=$(DISABLE_METRICS)",
+                  "--fasterCopy=$(FASTER_COPY)"]
           # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
+            - name: DISABLE_METRICS
+              value: "true"
+            - name: FASTER_COPY
+              value: "true"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
diff --git a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-beam-flink/resources/taskmanager-deployment.yaml
index e71177f1c7d3595b4dc48a82aebfe839bc2786eb..13028d5ab33fcbce47f095403302260fd8ab31bd 100644
--- a/theodolite-benchmarks/definitions/uc3-beam-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-beam-flink/resources/taskmanager-deployment.yaml
@@ -20,29 +20,8 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
           args: ["taskmanager"]
           env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
               value: "flink-jobmanager"
-            # - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-            #   value: "1" #TODO
-            # - name: FLINK_PROPERTIES
-            #   value: |+
-            #     blob.server.port: 6124
-            #     jobmanager.rpc.port: 6123
-            #     taskmanager.rpc.port: 6122
-            #     queryable-state.proxy.ports: 6125
-            #     jobmanager.memory.process.size: 4Gb
-            #     taskmanager.memory.process.size: 4Gb
-            #     #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -54,11 +33,5 @@ spec:
               name: query-state
             - containerPort: 9249
               name: metrics
-          # livenessProbe:
-          #   tcpSocket:
-          #     port: 6122
-          #   initialDelaySeconds: 30
-          #   periodSeconds: 60
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-
diff --git a/theodolite-benchmarks/definitions/uc3-beam-flink/uc3-beam-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-beam-flink/uc3-beam-flink-benchmark-operator.yaml
index c5b24f59e66284dc2c34819140fe7fd1db279ca3..263cfe3a613baa51099abc54373525076cd1adfe 100644
--- a/theodolite-benchmarks/definitions/uc3-beam-flink/uc3-beam-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc3-beam-flink/uc3-beam-flink-benchmark-operator.yaml
@@ -2,19 +2,22 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc3-beam-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc3
+    sut: beam-flink
 spec:
   sut:
     resources:
       - configMap:
           name: "benchmark-resources-uc3-beam-flink"
           files:
-          - "flink-configuration-configmap.yaml"
           - "taskmanager-deployment.yaml"
           - "taskmanager-service.yaml"
           - "service-monitor.yaml"
           - "jobmanager-service.yaml"
           - "jobmanager-deployment.yaml"
-          #- "jobmanager-rest-service.yaml"
+          # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +35,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
@@ -69,4 +67,4 @@ spec:
         numPartitions: 40
         replicationFactor: 1
       - name: "theodolite-.*"
-        removeOnly: True
\ No newline at end of file
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc3-beam-samza/resources/uc3-beam-samza-deployment.yaml b/theodolite-benchmarks/definitions/uc3-beam-samza/resources/uc3-beam-samza-deployment.yaml
index bb1917a156d6a04277a3ca6e3bccd580d760de21..8ccbf16df6ea95beecd975714a9eb272f76d5a38 100644
--- a/theodolite-benchmarks/definitions/uc3-beam-samza/resources/uc3-beam-samza-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-beam-samza/resources/uc3-beam-samza-deployment.yaml
@@ -25,12 +25,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: MAX_SOURCE_PARALLELISM
               value: "1024"
+            - name: ENABLE_METRICS
+              value: "false"
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc3-beam-samza/uc3-beam-samza-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-beam-samza/uc3-beam-samza-benchmark-operator.yaml
index 0b8b3e441b9abf669fbcad5dd57e92798134f6fd..6fde25874d9a0e23fab4b21ef927ba12920aab45 100644
--- a/theodolite-benchmarks/definitions/uc3-beam-samza/uc3-beam-samza-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc3-beam-samza/uc3-beam-samza-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc3-beam-samza
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc3
+    sut: beam-samza
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/flink-configuration-configmap.yaml
index 321541f6ac8715b8546b964d8ad2b7c28552fbcd..ecda4025087f103613b9700ca733962709c48836 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/flink-configuration-configmap.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/flink-configuration-configmap.yaml
@@ -6,19 +6,17 @@ metadata:
     app: flink
 data:
   flink-conf.yaml: |+
-    #jobmanager.rpc.address: flink-jobmanager
-    #taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
+    jobmanager.rpc.address: flink-jobmanager
+    blob.server.port: 6124
+    jobmanager.rpc.port: 6123
+    taskmanager.rpc.port: 6122
+    queryable-state.proxy.ports: 6125
+    jobmanager.memory.process.size: 4Gb
+    taskmanager.memory.process.size: 4Gb
     metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
     metrics.reporter.prom.interval: 10 SECONDS
+    # gives metrics about inbound/outbound network queue lengths
     taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
   log4j-console.properties: |+
     # This affects logging for both user code and Flink
     rootLogger.level = INFO
@@ -63,4 +61,4 @@ data:
 
     # Suppress the irrelevant (wrong) warnings from the Netty channel handler
     logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
+    logger.netty.level = OFF
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
index 90e13d000d48be220d40027aaf648d7e0472bbff..133205b13e1b5da6938ee0e2532849ec28038929 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
@@ -23,25 +23,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -63,21 +50,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf
-#            - name: job-artifacts-volume
-#              mountPath: /opt/flink/usrlib
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-jobmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -87,8 +63,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
-#        - name: job-artifacts-volume
-#          hostPath:
-#            path: /host/path/to/job/artifacts
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
index 7092166f295a8bb85f929775170b67c41cf17612..a9d7da30b0285ac440ac1d4ca3d09bc98c4af37c 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/taskmanager-deployment.yaml
@@ -18,32 +18,6 @@ spec:
       containers:
         - name: taskmanager
           image: ghcr.io/cau-se/theodolite-uc3-flink:latest
-          env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: PARALLELISM
-              value: "1"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-              value: "1" #TODO
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -62,19 +36,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf/
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-taskmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -84,5 +49,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
diff --git a/theodolite-benchmarks/definitions/uc3-flink/uc3-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-flink/uc3-flink-benchmark-operator.yaml
index f8ec54171047a55f65327ef6bbfe6401b11be19d..ce0bfc5ba193b8fce2a057f50aa0d2eb284afb3f 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/uc3-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/uc3-flink-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc3-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc3
+    sut: flink
 spec:
   sut:
     resources:
@@ -14,7 +18,7 @@ spec:
             - "service-monitor.yaml"
             - "jobmanager-service.yaml"
             - "jobmanager-deployment.yaml"
-            #- "jobmanager-rest-service.yaml"
+            # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +36,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumSensors"
       patchers:
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
index 491bee67b531d983ec3dfb2b008dc0b50994c30c..248dccb83191f273f073912905896be192df8b4c 100644
--- a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
@@ -21,12 +21,6 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
-            - name: WINDOW_SIZE_IN_SECONDS
-              value: "50"
-            - name: HOPPING_SIZE_IN_SECONDS
-              value: "1"
             #- name: KUBERNETES_DNS_NAME
             #  value: "titan-ccp-aggregation"
             - name: KUBERNETES_NAMESPACE
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-benchmark-operator.yaml
index 1b900585b23b2af958dc7914196cd1b744c4fd39..33a37157df3753dea4eca266fd48c36394ef8e64 100644
--- a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-hazelcastjet-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc3-hazelcastjet
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc3
+    sut: hazelcastjet
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc3-kstreams/resources/uc3-kstreams-deployment.yaml b/theodolite-benchmarks/definitions/uc3-kstreams/resources/uc3-kstreams-deployment.yaml
index 1a98c1a5edf2b4b0dc3b8fbb155c56c347ce22fa..6da2e5dee94fc2f65e2c4a55184654a384ffbd2f 100644
--- a/theodolite-benchmarks/definitions/uc3-kstreams/resources/uc3-kstreams-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-kstreams/resources/uc3-kstreams-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://theodolite-kafka-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc3-kstreams/uc3-kstreams-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-kstreams/uc3-kstreams-benchmark-operator.yaml
index c527f20a091d245365769732ea97eebdbcc63370..b352cc4fe80ee500d4dc1f526f4b3a6b39a895eb 100644
--- a/theodolite-benchmarks/definitions/uc3-kstreams/uc3-kstreams-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc3-kstreams/uc3-kstreams-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc3-kstreams
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc3
+    sut: kstreams
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc4-beam-flink/resources/flink-configuration-configmap.yaml
deleted file mode 100644
index 36178e2bebdac96b8648bd6c299009aa49d3fff6..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/flink-configuration-configmap.yaml
+++ /dev/null
@@ -1,66 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: flink-config
-  labels:
-    app: flink
-data:
-  flink-conf.yaml: |+
-    jobmanager.rpc.address: flink-jobmanager
-    taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
-    metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
-    metrics.reporter.prom.interval: 10 SECONDS
-    taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
-  log4j-console.properties: |+
-    # This affects logging for both user code and Flink
-    rootLogger.level = INFO
-    rootLogger.appenderRef.console.ref = ConsoleAppender
-    rootLogger.appenderRef.rolling.ref = RollingFileAppender
-
-    # Uncomment this if you want to _only_ change Flink's logging
-    #logger.flink.name = org.apache.flink
-    #logger.flink.level = INFO
-
-    # The following lines keep the log level of common libraries/connectors on
-    # log level INFO. The root logger does not override this. You have to manually
-    # change the log levels here.
-    logger.akka.name = akka
-    logger.akka.level = INFO
-    logger.kafka.name= org.apache.kafka
-    logger.kafka.level = INFO
-    logger.hadoop.name = org.apache.hadoop
-    logger.hadoop.level = INFO
-    logger.zookeeper.name = org.apache.zookeeper
-    logger.zookeeper.level = INFO
-
-    # Log all infos to the console
-    appender.console.name = ConsoleAppender
-    appender.console.type = CONSOLE
-    appender.console.layout.type = PatternLayout
-    appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-
-    # Log all infos in the given rolling file
-    appender.rolling.name = RollingFileAppender
-    appender.rolling.type = RollingFile
-    appender.rolling.append = false
-    appender.rolling.fileName = ${sys:log.file}
-    appender.rolling.filePattern = ${sys:log.file}.%i
-    appender.rolling.layout.type = PatternLayout
-    appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
-    appender.rolling.policies.type = Policies
-    appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
-    appender.rolling.policies.size.size=100MB
-    appender.rolling.strategy.type = DefaultRolloverStrategy
-    appender.rolling.strategy.max = 10
-
-    # Suppress the irrelevant (wrong) warnings from the Netty channel handler
-    logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-beam-flink/resources/jobmanager-deployment.yaml
index 2cd558313e07ed35945efcb2a16528911bdf3121..7dbcd62ccd2f99d6a66fc891511f7ebdd1897e51 100644
--- a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-beam-flink/resources/jobmanager-deployment.yaml
@@ -20,20 +20,22 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
           args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc4.beam.flink.Uc4BeamFlink",
                   "--parallelism=$(PARALLELISM)",
-                  "--disableMetrics=true",
-                  "--fasterCopy"]
+                  "--disableMetrics=$(DISABLE_METRICS)",
+                  "--fasterCopy=$(FASTER_COPY)"]
           # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
+            - name: DISABLE_METRICS
+              value: "true"
+            - name: FASTER_COPY
+              value: "true"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
diff --git a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-beam-flink/resources/taskmanager-deployment.yaml
index 75718b28564a2ffca7f684eccaab26b9caff6a5f..062ef653bbdf280c0aa31aa7ce476bc02c9993a1 100644
--- a/theodolite-benchmarks/definitions/uc4-beam-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-beam-flink/resources/taskmanager-deployment.yaml
@@ -20,29 +20,8 @@ spec:
           image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
           args: ["taskmanager"]
           env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
             - name: JOB_MANAGER_RPC_ADDRESS
               value: "flink-jobmanager"
-            # - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-            #   value: "1" #TODO
-            # - name: FLINK_PROPERTIES
-            #   value: |+
-            #     blob.server.port: 6124
-            #     jobmanager.rpc.port: 6123
-            #     taskmanager.rpc.port: 6122
-            #     queryable-state.proxy.ports: 6125
-            #     jobmanager.memory.process.size: 4Gb
-            #     taskmanager.memory.process.size: 4Gb
-            #     #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -54,11 +33,5 @@ spec:
               name: query-state
             - containerPort: 9249
               name: metrics
-          # livenessProbe:
-          #   tcpSocket:
-          #     port: 6122
-          #   initialDelaySeconds: 30
-          #   periodSeconds: 60
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-
diff --git a/theodolite-benchmarks/definitions/uc4-beam-flink/uc4-beam-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-beam-flink/uc4-beam-flink-benchmark-operator.yaml
index 0b2497e3ef87895b35982cf3880583407fd5266f..6a9e33b403c626239e2d914f2cd51bf6ab79cc16 100644
--- a/theodolite-benchmarks/definitions/uc4-beam-flink/uc4-beam-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc4-beam-flink/uc4-beam-flink-benchmark-operator.yaml
@@ -2,19 +2,22 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc4-beam-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc4
+    sut: beam-flink
 spec:
   sut:
     resources:
       - configMap:
           name: "benchmark-resources-uc4-beam-flink"
           files:
-          - "flink-configuration-configmap.yaml"
           - "taskmanager-deployment.yaml"
           - "taskmanager-service.yaml"
           - "service-monitor.yaml"
           - "jobmanager-service.yaml"
           - "jobmanager-deployment.yaml"
-          #- "jobmanager-rest-service.yaml"
+          # - "jobmanager-rest-service.yaml"
   loadGenerator:
     resources:
       - configMap:
@@ -32,11 +35,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumNestedGroups"
       patchers:
@@ -76,4 +74,4 @@ spec:
         numPartitions: 40
         replicationFactor: 1
       - name: "theodolite-.*"
-        removeOnly: True
\ No newline at end of file
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc4-beam-samza/resources/uc4-beam-samza-deployment.yaml b/theodolite-benchmarks/definitions/uc4-beam-samza/resources/uc4-beam-samza-deployment.yaml
index fccf67c82bbda59a1f31fa0617cd158a99a09f46..506802d0993ceb3504aec01d298f9fb3bd741bde 100644
--- a/theodolite-benchmarks/definitions/uc4-beam-samza/resources/uc4-beam-samza-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-beam-samza/resources/uc4-beam-samza-deployment.yaml
@@ -25,12 +25,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: MAX_SOURCE_PARALLELISM
               value: "1024"
+            - name: ENABLE_METRICS
+              value: "false"
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc4-beam-samza/uc4-beam-samza-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-beam-samza/uc4-beam-samza-benchmark-operator.yaml
index 62d36fc8adfb85418c9a9ad9941f004667cfcc62..b07976d82d70157d5f8ff1e7ecf498168f12a269 100644
--- a/theodolite-benchmarks/definitions/uc4-beam-samza/uc4-beam-samza-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc4-beam-samza/uc4-beam-samza-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc4-beam-samza
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc4
+    sut: beam-samza
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/flink-configuration-configmap.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/flink-configuration-configmap.yaml
index 321541f6ac8715b8546b964d8ad2b7c28552fbcd..ecda4025087f103613b9700ca733962709c48836 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/flink-configuration-configmap.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/flink-configuration-configmap.yaml
@@ -6,19 +6,17 @@ metadata:
     app: flink
 data:
   flink-conf.yaml: |+
-    #jobmanager.rpc.address: flink-jobmanager
-    #taskmanager.numberOfTaskSlots: 1 #TODO
-    #blob.server.port: 6124
-    #jobmanager.rpc.port: 6123
-    #taskmanager.rpc.port: 6122
-    #queryable-state.proxy.ports: 6125
-    #jobmanager.memory.process.size: 4Gb
-    #taskmanager.memory.process.size: 4Gb
-    #parallelism.default: 1 #TODO
+    jobmanager.rpc.address: flink-jobmanager
+    blob.server.port: 6124
+    jobmanager.rpc.port: 6123
+    taskmanager.rpc.port: 6122
+    queryable-state.proxy.ports: 6125
+    jobmanager.memory.process.size: 4Gb
+    taskmanager.memory.process.size: 4Gb
     metrics.reporter.prom.class: org.apache.flink.metrics.prometheus.PrometheusReporter
     metrics.reporter.prom.interval: 10 SECONDS
+    # gives metrics about inbound/outbound network queue lengths
     taskmanager.network.detailed-metrics: true
-  # -> gives metrics about inbound/outbound network queue lengths
   log4j-console.properties: |+
     # This affects logging for both user code and Flink
     rootLogger.level = INFO
@@ -63,4 +61,4 @@ data:
 
     # Suppress the irrelevant (wrong) warnings from the Netty channel handler
     logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
-    logger.netty.level = OFF
\ No newline at end of file
+    logger.netty.level = OFF
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
index fe8f232170406d06f7339455e1488c716d0326db..56c8066e209a1c3a41924ad320cc2fe0a9282fa0 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
@@ -23,25 +23,12 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
             - name: CHECKPOINTING
               value: "false"
             - name: PARALLELISM
               value: "1"
             - name: "FLINK_STATE_BACKEND"
               value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -63,21 +50,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf
-#            - name: job-artifacts-volume
-#              mountPath: /opt/flink/usrlib
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-jobmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -87,8 +63,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
-#        - name: job-artifacts-volume
-#          hostPath:
-#            path: /host/path/to/job/artifacts
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
index 7092166f295a8bb85f929775170b67c41cf17612..a9d7da30b0285ac440ac1d4ca3d09bc98c4af37c 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
@@ -18,32 +18,6 @@ spec:
       containers:
         - name: taskmanager
           image: ghcr.io/cau-se/theodolite-uc3-flink:latest
-          env:
-            - name: KAFKA_BOOTSTRAP_SERVERS
-              value: "theodolite-kafka-kafka-bootstrap:9092"
-            - name: SCHEMA_REGISTRY_URL
-              value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS
-              value: "100"
-            - name: CHECKPOINTING
-              value: "false"
-            - name: PARALLELISM
-              value: "1"
-            - name: "FLINK_STATE_BACKEND"
-              value: "rocksdb"
-            - name: JOB_MANAGER_RPC_ADDRESS
-              value: "flink-jobmanager"
-            - name: TASK_MANAGER_NUMBER_OF_TASK_SLOTS
-              value: "1" #TODO
-            - name: FLINK_PROPERTIES
-              value: |+
-                blob.server.port: 6124
-                jobmanager.rpc.port: 6123
-                taskmanager.rpc.port: 6122
-                queryable-state.proxy.ports: 6125
-                jobmanager.memory.process.size: 4Gb
-                taskmanager.memory.process.size: 4Gb
-                #parallelism.default: 1 #TODO
           resources:
             limits:
               memory: 4Gi
@@ -62,19 +36,10 @@ spec:
             initialDelaySeconds: 30
             periodSeconds: 60
           volumeMounts:
-            - name: flink-config-volume-rw
+            - name: flink-config-volume
               mountPath: /opt/flink/conf/
           securityContext:
             runAsUser: 9999  # refers to user _flink_ from official flink image, change if necessary
-      initContainers:
-        - name: init-taskmanager
-          image: busybox:1.28
-          command: ['cp', '-a', '/flink-config/.', '/flink-config-rw/']
-          volumeMounts:
-            - name: flink-config-volume
-              mountPath: /flink-config/
-            - name: flink-config-volume-rw
-              mountPath: /flink-config-rw/
       volumes:
         - name: flink-config-volume
           configMap:
@@ -84,5 +49,3 @@ spec:
                 path: flink-conf.yaml
               - key: log4j-console.properties
                 path: log4j-console.properties
-        - name: flink-config-volume-rw
-          emptyDir: {}
diff --git a/theodolite-benchmarks/definitions/uc4-flink/uc4-flink-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-flink/uc4-flink-benchmark-operator.yaml
index 042ec9989731ef44496156f41ad2d86b5d3964df..b1b563bb415deea9978f9887a581f02b0e086497 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/uc4-flink-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/uc4-flink-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc4-flink
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc4
+    sut: flink
 spec:
   sut:
     resources:
@@ -32,11 +36,6 @@ spec:
           properties:
             container: "jobmanager"
             variableName: "PARALLELISM"
-        - type: "EnvVarPatcher" # required?
-          resource: "taskmanager-deployment.yaml"
-          properties:
-            container: "taskmanager"
-            variableName: "PARALLELISM"
   loadTypes:
     - typeName: "NumNestedGroups"
       patchers:
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
index 1c5272cb6adf05579f79125f300836740450e361..06fabde838022e17faceef6df30fe7bfd6198e66 100644
--- a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
@@ -21,10 +21,6 @@ spec:
               value: "theodolite-kafka-kafka-bootstrap:9092"
             - name: SCHEMA_REGISTRY_URL
               value: "http://theodolite-kafka-schema-registry:8081"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
-            - name: WINDOW_SIZE
-              value: "5000"
             #- name: KUBERNETES_DNS_NAME
             #  value: "titan-ccp-aggregation"
             - name: KUBERNETES_NAMESPACE
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-benchmark-operator.yaml
index 6a69c0af8206fc3906c8679cb137745fab255f35..79c0fdbf7f674f45d5531819cf214f0bc3f07d1c 100644
--- a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-hazelcastjet-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc4-hazelcastjet
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc4
+    sut: hazelcastjet
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/definitions/uc4-kstreams/resources/uc4-kstreams-deployment.yaml b/theodolite-benchmarks/definitions/uc4-kstreams/resources/uc4-kstreams-deployment.yaml
index bc083914431fa027308205825730afd5a19cc250..848d1d3a1e416412b157963afd7fc98431a0049a 100644
--- a/theodolite-benchmarks/definitions/uc4-kstreams/resources/uc4-kstreams-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-kstreams/resources/uc4-kstreams-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://theodolite-kafka-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite-benchmarks/definitions/uc4-kstreams/uc4-kstreams-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-kstreams/uc4-kstreams-benchmark-operator.yaml
index 06e692f608698675a89f1236438483d300de5101..8d47e927de7fd774882cf4746233dba63e4703aa 100644
--- a/theodolite-benchmarks/definitions/uc4-kstreams/uc4-kstreams-benchmark-operator.yaml
+++ b/theodolite-benchmarks/definitions/uc4-kstreams/uc4-kstreams-benchmark-operator.yaml
@@ -2,6 +2,10 @@ apiVersion: theodolite.rocks/v1beta1
 kind: benchmark
 metadata:
   name: uc4-kstreams
+  labels:
+    suite: theodolite-stream-processing
+    benchmark: uc4
+    sut: kstreams
 spec:
   sut:
     resources:
diff --git a/theodolite-benchmarks/docker-test/README.md b/theodolite-benchmarks/docker-test/README.md
index 5d7ca3f4ac470202579f154fe8f066a246c84d23..8f800848f5674a34723ec44e01767359af0b0475 100644
--- a/theodolite-benchmarks/docker-test/README.md
+++ b/theodolite-benchmarks/docker-test/README.md
@@ -1,8 +1,8 @@
 # Docker Compose Files for Testing
 
 This directory contains Docker Compose files, which help testing Benchmark implementations.
-For each stream processing engine (Kafka Streams and Flink) and Benchmark (UC1-4), a Docker Compose file is provided
-in the corresponding subdirectory.
+For each stream processing engine (Kafka Streams, Flink, Hazelcast Jet, Beam/Flink and Beam/Samza) and Benchmark
+(UC1-4), a Docker Compose file is provided in the corresponding subdirectory.
 
 ## Full Dockerized Testing
 
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
index 2212c3b539045114f31760d605ad928e237ed924..8e7f2243ddd32ab3f9d1be13487945e1867f6851 100644
--- a/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
@@ -51,6 +51,7 @@ services:
       SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      ENABLE_METRICS: "false"
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
index cc6bc7a7112c35f11ce9cfd27d09aebe401c8c51..ed6d9ef975853b24fec6e1cb7ad081203ddd2666 100644
--- a/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
@@ -51,6 +51,7 @@ services:
       SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      ENABLE_METRICS: "false"
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
index 92b90823e31f79e68b301ff039618c9520c92019..55b0c457e5eabc2d58687d60cc1f95fd2d0e6023 100644
--- a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
@@ -50,7 +50,6 @@ services:
       BOOTSTRAP_SERVER: benchmark:5701
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
-      DOWNSAMPLE_INTERVAL: 5000
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
index efdba90bef634bab76012316f67b0f9be9f79c77..f378fe86a183c370b9e11ab141d105531d1ff40e 100755
--- a/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
@@ -45,7 +45,6 @@ services:
     environment:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
-      KAFKA_WINDOW_DURATION_MINUTES: 60
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
index bf120f31dbfda2384b314ba4a90a25362f37b6c2..c6367202a8681f2cf0f9c0fe952e920e898c915c 100644
--- a/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
@@ -51,6 +51,7 @@ services:
       SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      ENABLE_METRICS: "false"
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc3-flink/test.sh b/theodolite-benchmarks/docker-test/uc3-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..217a730f73fa1fee3f875da34edd9047ed9221db
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
index d7c3fe7a017c24e0b212661f0b0b34c2a1fee32c..da77dcc1c0667c01e2e9cf7e08c6230fe1dc2837 100644
--- a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
@@ -10,8 +10,8 @@ services:
     image: wurstmeister/kafka
     expose:
       - "9092"
-    #ports:
-    #  - 19092:19092
+    # ports:
+    #   - 19092:19092
     environment:
       KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
@@ -32,7 +32,7 @@ services:
     restart: "on-failure"
     expose:
       - "8081"
-    #ports:
+    # ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
@@ -44,14 +44,12 @@ services:
       - kafka
     expose:
       - 5701
-    #ports:
-    #  - 5701:5701
+    # ports:
+    #   - 5701:5701
     environment:
       BOOTSTRAP_SERVER: benchmark:5701
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
-      WINDOW_SIZE_IN_SECONDS: 50
-      HOPPING_SIZE_IN_SECONDS: 5
   load-generator:
     image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
index d236af7d284ebb085c78110feb6001cb28d18290..51011d2d2645c8542724d7f84f29c9cdae970e8e 100644
--- a/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
@@ -51,6 +51,7 @@ services:
       SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      ENABLE_METRICS: "false"
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
index ca6f3c85fe670e53bafd6a56e568cad9166ae501..ef95b07ee5dcaf47f0738d2a6e018c7347d944bb 100644
--- a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
@@ -50,7 +50,6 @@ services:
       BOOTSTRAP_SERVER: benchmark:5701
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
-      WINDOW_SIZE_UC4: 5000
   load-generator: 
     image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
diff --git a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/AbstractFlinkService.java b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/AbstractFlinkService.java
index f348543cd9897bc3abf1871ce828c22ea531dd4c..03e79788631d0e236a78cf019bef3984bcd1ba40 100644
--- a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/AbstractFlinkService.java
+++ b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/AbstractFlinkService.java
@@ -60,8 +60,8 @@ public abstract class AbstractFlinkService {
 
   protected void configureCheckpointing() {
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
-    final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS);
-    LOGGER.info("Set parallelism to: {}.", checkpointing);
+    final int commitIntervalMs = this.config.getInt(ConfigurationKeys.CHECKPOINTING_INTERVAL_MS);
+    LOGGER.info("Set checkpointing to: {}.", checkpointing);
     if (checkpointing) {
       this.env.enableCheckpointing(commitIntervalMs);
     }
diff --git a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/ConfigurationKeys.java b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/ConfigurationKeys.java
index 9eb143c3c07f879de37eafa2fbe6729bf182d45e..396ca98675fb8ceae818fb9eeeec7b23d9b1aba6 100644
--- a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/ConfigurationKeys.java
+++ b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/ConfigurationKeys.java
@@ -19,11 +19,9 @@ public final class ConfigurationKeys {
 
   public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
 
-  public static final String WINDOW_SIZE_MS = "window.size.ms";
-
-  public static final String WINDOW_GRACE_MS = "window.grace.ms";
+  public static final String CHECKPOINTING = "checkpointing";
 
-  public static final String COMMIT_INTERVAL_MS = "commit.interval.ms";
+  public static final String CHECKPOINTING_INTERVAL_MS = "checkpointing.interval.ms";
 
   public static final String FLINK_STATE_BACKEND = "flink.state.backend";
 
@@ -34,8 +32,6 @@ public final class ConfigurationKeys {
 
   public static final String DEBUG = "debug";
 
-  public static final String CHECKPOINTING = "checkpointing";
-
   public static final String PARALLELISM = "parallelism";
 
   private ConfigurationKeys() {}
diff --git a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/KafkaConnectorFactory.java b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/KafkaConnectorFactory.java
index a17403f07b8dcc9d3034222035428ecae6b7f860..549b0e7f1bf1cccb5dbc00100f1e4e32139910f4 100644
--- a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/KafkaConnectorFactory.java
+++ b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/KafkaConnectorFactory.java
@@ -21,7 +21,9 @@ import rocks.theodolite.benchmarks.commons.flink.util.SerializableSupplier;
 /**
  * A class for creating {@link FlinkKafkaConsumer} and {@link FlinkKafkaProducer}.
  */
-public class KafkaConnectorFactory {
+public class KafkaConnectorFactory { // NOPMD
+
+  private static final String AUTO_OFFSET_RESET_EARLIEST = "earliest";
 
   private static final Duration PRODUCER_TRANSACTION_TIMEOUT = Duration.ofMinutes(5);
 
@@ -50,7 +52,7 @@ public class KafkaConnectorFactory {
   public <T> FlinkKafkaConsumer<T> createConsumer(final String topic,
       final DeserializationSchema<T> deserializationSchema) {
     return this.createBaseConsumer(
-        new FlinkKafkaConsumer<>(topic, deserializationSchema, this.cloneProperties()));
+        new FlinkKafkaConsumer<>(topic, deserializationSchema, this.buildConsumerProperties()));
   }
 
   /**
@@ -60,7 +62,7 @@ public class KafkaConnectorFactory {
   public <T> FlinkKafkaConsumer<T> createConsumer(final String topic,
       final KafkaDeserializationSchema<T> deserializationSchema) {
     return this.createBaseConsumer(
-        new FlinkKafkaConsumer<>(topic, deserializationSchema, this.cloneProperties()));
+        new FlinkKafkaConsumer<>(topic, deserializationSchema, this.buildConsumerProperties()));
   }
 
   /**
@@ -145,6 +147,14 @@ public class KafkaConnectorFactory {
     return producerProps;
   }
 
+  private Properties buildConsumerProperties() {
+    final Properties consumerProps = this.cloneProperties();
+    consumerProps.setProperty(
+        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        AUTO_OFFSET_RESET_EARLIEST);
+    return consumerProps;
+  }
+
   private Properties cloneProperties() {
     final Properties props = new Properties();
     props.putAll(this.kafkaProps);
diff --git a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/serialization/StatsSerializer.java b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/serialization/StatsSerializer.java
index 881f5870a4dda3085d1391aea016f61018627029..4afd05e938d3de264a6c07e156229df02bebda37 100644
--- a/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/serialization/StatsSerializer.java
+++ b/theodolite-benchmarks/flink-commons/src/main/java/rocks/theodolite/benchmarks/commons/flink/serialization/StatsSerializer.java
@@ -8,7 +8,8 @@ import com.google.common.math.Stats;
 import java.io.Serializable;
 
 /**
- * Custom Kryo {@link Serializer} for efficient transmission between Flink instances.
+ * Custom Kryo {@link Serializer}for {@link Stats} objects for efficient transmission between Flink
+ * instances.
  */
 public class StatsSerializer extends Serializer<Stats> implements Serializable {
 
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
index d1705888430c92ee0cec50ea06871746bbe06cb5..141b5a427d33ad50bb4881d0063c3069ec4ac6ac 100644
--- a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
@@ -5,29 +5,22 @@ package rocks.theodolite.benchmarks.commons.hazelcastjet;
  */
 public class ConfigurationKeys {
 
-  // Common Keys
+  public static final String APPLICATION_NAME = "application.name";
+
   public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
+
   public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
+
   public static final String PORT = "PORT";
+
   public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
+
   public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";
-  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
-  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
-  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";
 
-  // Additional topics
-  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
 
-  // UC2
-  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";
+  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
 
-  // UC3
-  public static final String WINDOW_SIZE_IN_SECONDS = "WINDOW_SIZE_IN_SECONDS";
-  public static final String HOPPING_SIZE_IN_SECONDS = "HOPPING_SIZE_IN_SECONDS";
+  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
 
-  // UC4
-  public static final String KAFKA_CONFIGURATION_TOPIC = "KAFKA_CONFIGURATION_TOPIC";
-  public static final String KAFKA_FEEDBACK_TOPIC = "KAFKA_FEEDBACK_TOPIC";
-  public static final String WINDOW_SIZE_UC4 = "WINDOW_SIZE";
-  
 }
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/HazelcastJetService.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/HazelcastJetService.java
new file mode 100644
index 0000000000000000000000000000000000000000..34378c427f150ac079e4e67cb96bfb164ceeac4b
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/HazelcastJetService.java
@@ -0,0 +1,76 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import org.apache.commons.configuration2.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import titan.ccp.common.configuration.ServiceConfigurations;
+
+/**
+ * Abstract HazelcastJetService. Holds common fields and logic shared for all hazelcast jet
+ * services. Set common settings and initiates a hazelcast jet instance.
+ */
+public abstract class HazelcastJetService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HazelcastJetService.class);
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+
+  protected final Configuration config = ServiceConfigurations.createWithDefaults();
+  protected final String kafkaBootstrapServer;
+  protected final String schemaRegistryUrl;
+  protected final String jobName;
+
+  protected final String kafkaInputTopic;
+
+  protected PipelineFactory pipelineFactory;
+  protected final JobConfig jobConfig = new JobConfig();
+  protected final KafkaPropertiesBuilder propsBuilder;
+
+  private final JetInstance jetInstance;
+
+
+  /**
+   * Instantiate a new abstract service. Retrieves needed fields using ServiceConfiguration and
+   * build a new jet instance.
+   */
+  public HazelcastJetService(final Logger logger) {
+    this.jobName = this.config.getString(ConfigurationKeys.APPLICATION_NAME);
+
+    this.kafkaBootstrapServer = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+    this.schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL);
+    this.propsBuilder =
+        new KafkaPropertiesBuilder(this.kafkaBootstrapServer, this.schemaRegistryUrl, this.jobName);
+
+    this.kafkaInputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
+
+    final JetInstanceBuilder jetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, this.kafkaBootstrapServer, HZ_KUBERNETES_SERVICE_DNS_KEY);
+    this.jetInstance = jetInstance.build();
+  }
+
+
+  /**
+   * Constructs and starts the pipeline. First, initiates a pipeline. Second, register the
+   * corresponding serializers. Third, set the job name. Lastly, add the job to the Hazelcast
+   * instance.
+   */
+  public void run() {
+    try {
+      final Pipeline pipeline = this.pipelineFactory.buildPipeline();
+      this.registerSerializer();
+      this.jobConfig.setName(this.config.getString("name"));
+      this.jetInstance.newJobIfAbsent(pipeline, this.jobConfig).join();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!:", e);
+    }
+  }
+
+
+  /**
+   * Needs to be implemented by subclasses to register the needed Serializer.
+   */
+  protected abstract void registerSerializer();
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
index cc2ee052d5e2ed7e7b372baf7b59f24ef3e26e8f..3e78518a25abf5b2302c72d712718c1793fd44a5 100644
--- a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
@@ -42,17 +42,18 @@ public class JetInstanceBuilder {
   }
 
   /**
-   * Builds and returns a JetInstance. If a config is set, the JetInstance will contain the set
-   * config.
+   * Builds and returns a {@link JetInstance}. If a config is set, the {@link JetInstance} will
+   * contain the set config.
    *
    * @return JetInstance
    */
   public JetInstance build() {
     final JetInstance jet = Jet.newJetInstance();
-    if (this.config == null) {
+    final Config localConfig = this.config;
+    if (localConfig == null) {
       return jet;
     } else {
-      jet.getConfig().setHazelcastConfig(this.config);
+      jet.getConfig().setHazelcastConfig(localConfig);
       return jet;
     }
 
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
index 9bce60f57a6ecb9da4578e08d8f49bbb34af934a..a234d31ee349a768e284fa4234b2bd8dee6d38de 100644
--- a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
@@ -11,13 +11,78 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 /**
  * Generalized builder for Kafka properties.
  * Will always set AUTO_OFFSET_RESET_CONFIG to earliest
- *
  */
 public class KafkaPropertiesBuilder {
 
   private static final String TRUE = "true";
   private static final String AUTO_OFFSET_RESET_CONFIG = "earliest";
 
+  private static final String SPECIFIC_AVRO_WRITER = "specific.avro.writer";
+
+  private final Properties readProperties;
+
+  private final Properties writeProperties;
+
+  /**
+   * Constructs a new PropertiesBuilder with defined default read and write properties.
+   * @param kafkaBootstrapServer default boostrap address property.
+   * @param schemaRegistryUrl default schema registry address property.
+   * @param jobName default job name property.
+   */
+  public KafkaPropertiesBuilder(final String kafkaBootstrapServer,
+                                final String schemaRegistryUrl,
+                                final String jobName) {
+
+    this.writeProperties = new Properties();
+    this.readProperties = new Properties();
+    readProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServer);
+    readProperties.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+        schemaRegistryUrl);
+
+    writeProperties.putAll(readProperties);
+
+    readProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, jobName);
+    readProperties.setProperty(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, TRUE);
+    readProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, AUTO_OFFSET_RESET_CONFIG);
+    readProperties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, TRUE);
+
+    writeProperties.setProperty(SPECIFIC_AVRO_WRITER, TRUE);
+  }
+
+  /**
+   * Returns default read properties with the defined deserializers.
+   * @param keyDeserializer deserializer for the key.
+   * @param valueDeserializer deserializer for the value.
+   */
+  public Properties buildReadProperties(final String keyDeserializer,
+                                        final String valueDeserializer) {
+
+    final Properties props = new Properties();
+    props.putAll(this.readProperties);
+    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+        keyDeserializer);
+    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+        valueDeserializer);
+    return props;
+  }
+
+  /**
+   * Returns default read properties with the defined Serializers.
+   * @param keySerializer serializer for the key.
+   * @param valueSerializer serializer for the value.
+   */
+  public Properties buildWriteProperties(final String keySerializer,
+                                        final String valueSerializer) {
+
+    final Properties props = new Properties();
+    props.putAll(this.writeProperties);
+    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+        keySerializer);
+    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+        valueSerializer);
+    return props;
+  }
+
 
   /**
    * Builds Kafka Properties used for the UC4 Benchmark pipeline.
@@ -87,13 +152,9 @@ public class KafkaPropertiesBuilder {
     props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
         valueSerializer);
     props.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
-    props.setProperty("specific.avro.writer", TRUE);
+    props.setProperty(SPECIFIC_AVRO_WRITER, TRUE);
 
     return props;
   }
 
-
-
-
-
 }
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/PipelineFactory.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/PipelineFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..e9d2baf6dcb5b81b6ef7c8668458cd505dd7aa0b
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/PipelineFactory.java
@@ -0,0 +1,68 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.jet.pipeline.Pipeline;
+
+import java.util.Properties;
+
+/**
+ * Abstract class to handle the common logic for all pipelines.
+ * Implement {@link #buildPipeline()} method to implement the custom logic of the use case.
+ * Caution implement this with the help of an extendPipeline() method in order to
+ * be testable without further infrastructure.
+ * A template for this is construct the sources in {@link #buildPipeline()} and give them
+ * as parameters to extendTopology(...).
+ * Further implement the pipeline logic in the extendPipeline() method and return the last stage.
+ * Use the returned stage afterwards in the {@link #buildPipeline()} to write the results.
+ */
+public abstract class PipelineFactory {
+
+  protected final Pipeline pipe;
+
+  protected Properties kafkaReadPropsForPipeline;
+  protected Properties kafkaWritePropsForPipeline;
+
+  protected String kafkaInputTopic;
+  protected String kafkaOutputTopic;
+
+
+
+  public PipelineFactory() {
+    this.pipe = Pipeline.create();
+  }
+
+
+  /**
+   * Constructs a pipeline factory with read properties and input topic.
+   * Directly used for Uc1.
+   */
+  public PipelineFactory(final Properties kafkaReadPropsForPipeline,
+                         final String kafkaInputTopic) {
+    this();
+    this.kafkaReadPropsForPipeline = kafkaReadPropsForPipeline;
+    this.kafkaInputTopic = kafkaInputTopic;
+  }
+
+  /**
+   * Constructs a pipeline factory with read/write properties and input/output topic.
+   */
+  public PipelineFactory(final Properties kafkaReadPropsForPipeline,
+                         final String kafkaInputTopic,
+                         final Properties kafkaWritePropsForPipeline,
+                         final String kafkaOutputTopic) {
+    this(kafkaReadPropsForPipeline, kafkaInputTopic);
+    this.kafkaWritePropsForPipeline = kafkaWritePropsForPipeline;
+    this.kafkaOutputTopic = kafkaOutputTopic;
+  }
+
+  /**
+   * Implement to construct the use case logic.
+   * @return pipeline that holds the use case logic.
+   */
+  public abstract Pipeline buildPipeline();
+
+  public Pipeline getPipe() {
+    return this.pipe;
+  }
+
+
+}
diff --git a/theodolite-benchmarks/kstreams-commons/src/main/java/rocks/theodolite/benchmarks/commons/kstreams/ConfigurationKeys.java b/theodolite-benchmarks/kstreams-commons/src/main/java/rocks/theodolite/benchmarks/commons/kstreams/ConfigurationKeys.java
index ca74aa7d9fd88a7d3c20589438e0c9454062d2f0..61a2df2c4014af7367020cc326efd0060241ccc8 100644
--- a/theodolite-benchmarks/kstreams-commons/src/main/java/rocks/theodolite/benchmarks/commons/kstreams/ConfigurationKeys.java
+++ b/theodolite-benchmarks/kstreams-commons/src/main/java/rocks/theodolite/benchmarks/commons/kstreams/ConfigurationKeys.java
@@ -15,27 +15,6 @@ public final class ConfigurationKeys {
 
   public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
 
-  // Additional topics
-  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
-
-  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
-
-  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
-
-  // UC2
-  public static final String EMIT_PERIOD_MS = "emit.period.ms";
-
-  public static final String GRACE_PERIOD_MS = "grace.period.ms";
-
-  // UC3
-  public static final String KAFKA_WINDOW_DURATION_MINUTES = "kafka.window.duration.minutes";
-
-  // UC4
-  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
-
-  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
-
-
   private ConfigurationKeys() {}
 
 }
diff --git a/theodolite-benchmarks/uc1-beam-samza/Dockerfile b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
index cf6ef6675464e3c9d37db492b39fd8a71ec60e63..8519d68153d3341f2f680de6cd4662510c05ac98 100644
--- a/theodolite-benchmarks/uc1-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
@@ -1,9 +1,10 @@
 FROM openjdk:11-slim
 
 ENV MAX_SOURCE_PARALLELISM=1024
+ENV ENABLE_METRICS=true
 
 ADD build/distributions/uc1-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc1-beam-samza/bin/uc1-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc1-beam-samza/bin/uc1-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=$ENABLE_METRICS --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
 
diff --git a/theodolite-benchmarks/uc1-beam/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-beam/src/main/resources/META-INF/application.properties
index e9de96c0df34b1254a8ec9886586e163999c7c6e..6bb2df5bef42038637b88b67b36178797407088e 100644
--- a/theodolite-benchmarks/uc1-beam/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc1-beam/src/main/resources/META-INF/application.properties
@@ -10,11 +10,9 @@ kafka.output.topic=output
 
 schema.registry.url=http://localhost:8081
 
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
-
 specific.avro.reader=true
+
+# Kafka Settings
 enable.auto.commit=true
 max.poll.records=500
 auto.offset.reset=earliest
diff --git a/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/ConfigurationKeys.java b/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/ConfigurationKeys.java
deleted file mode 100644
index d2235e0693abe8f5f45899eb03bfbefec51526d5..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/ConfigurationKeys.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package rocks.theodolite.benchmarks.uc1.flink;
-
-/**
- * Keys to access configuration parameters.
- */
-public final class ConfigurationKeys {
-
-  public static final String APPLICATION_NAME = "application.name";
-
-  public static final String APPLICATION_VERSION = "application.version";
-
-  public static final String COMMIT_INTERVAL_MS = "commit.interval.ms";
-
-  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
-
-  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
-
-  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
-
-  public static final String CHECKPOINTING = "checkpointing";
-
-  public static final String PARALLELISM = "parallelism";
-
-  private ConfigurationKeys() {}
-
-}
diff --git a/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/HistoryServiceFlinkJob.java
index 9d3412c7f7a318b471902f9f2f38e714bf1034ec..5166e314494879799c396fc254582a69cf5d4c62 100644
--- a/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/HistoryServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc1-flink/src/main/java/rocks/theodolite/benchmarks/uc1/flink/HistoryServiceFlinkJob.java
@@ -4,6 +4,7 @@ import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
 import rocks.theodolite.benchmarks.commons.flink.AbstractFlinkService;
+import rocks.theodolite.benchmarks.commons.flink.ConfigurationKeys;
 import rocks.theodolite.benchmarks.commons.flink.KafkaConnectorFactory;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
diff --git a/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties
index 905e501b8cb66712f2b245470d96803987a9b93b..4c5a921ce6775e355138e25de2146eec82dc4a18 100644
--- a/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties
@@ -7,6 +7,5 @@ kafka.output.topic=output
 
 schema.registry.url=http://localhost:8081
 
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
+# Flink configuration
+checkpointing.interval.ms=1000
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
index 83848261318b2e90d19f28d9ab53fdc2cf678279..a662aa1eddf2e667da5ec7714d471fb073f7a268 100644
--- a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
@@ -1,64 +1,42 @@
 package rocks.theodolite.benchmarks.uc1.hazelcastjet;
 
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.HazelcastJetService;
 
 /**
- * A microservice that manages the history and, therefore, stores and aggregates incoming
- * measurements.
+ * A microservice that records incoming measurements.
  */
-public class HistoryService {
+public class HistoryService extends HazelcastJetService {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
 
-  // Hazelcast settings (default)
-  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
-  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
-
-  // Kafka settings (default)
-  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
-  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
-  private static final String KAFKA_TOPIC_DEFAULT = "input";
-
-  // Job name (default)
-  private static final String JOB_NAME = "uc1-hazelcastjet";
-
-
   /**
-   * Entrypoint for UC1 using Gradle Run.
+   * Constructs the use case logic for UC1.
+   * Retrieves the needed values and instantiates a pipeline factory.
    */
-  public static void main(final String[] args) {
-    final HistoryService uc1HistoryService = new HistoryService();
-    try {
-      uc1HistoryService.run();
-    } catch (final Exception e) { // NOPMD
-      LOGGER.error("ABORT MISSION!: {}", e);
-    }
+  public HistoryService() {
+    super(LOGGER);
+    final Properties kafkaProps =
+        this.propsBuilder.buildReadProperties(
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    this.pipelineFactory = new Uc1PipelineFactory(kafkaProps, this.kafkaInputTopic);
+
   }
 
-  /**
-   * Start a UC1 service.
-   *
-   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  public void run() throws Exception { // NOPMD
-    this.createHazelcastJetApplication();
+  @Override
+  protected void registerSerializer() {
+    // Empty since no serializer is needed in UC1.
   }
 
-  /**
-   * Creates a Hazelcast Jet Application for UC1 using the Uc1HazelcastJetFactory.
-   *
-   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  private void createHazelcastJetApplication() throws Exception { // NOPMD
-    new Uc1HazelcastJetFactory()
-        .setPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT,JOB_NAME)
-        .setKafkaInputTopicFromEnv(KAFKA_TOPIC_DEFAULT)
-        .buildUc1Pipeline()
-        .buildUc1JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
-        .runUc1Job(JOB_NAME);
+  public static void main(final String[] args) {
+    new HistoryService().run();
   }
 
+
 }
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
deleted file mode 100644
index 93aaa1a7e844634bb4fdf283d5b9f41a0d6c0b7f..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package rocks.theodolite.benchmarks.uc1.hazelcastjet;
-
-import com.hazelcast.jet.JetInstance;
-import com.hazelcast.jet.config.JobConfig;
-import com.hazelcast.jet.pipeline.Pipeline;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import java.util.Objects;
-import java.util.Properties;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.slf4j.Logger;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
-
-/**
- * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC1
- * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
- * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
- * the Properties and set the input topic which can be done using internal functions of this
- * factory. Outside data only refers to custom values or default values in case data of the
- * environment cannot the fetched.
- */
-public class Uc1HazelcastJetFactory {
-
-  // Information per History Service
-  private Properties kafkaPropertiesForPipeline;
-  private String kafkaInputTopic;
-  private JetInstance uc1JetInstance;
-  private Pipeline uc1JetPipeline;
-
-  /////////////////////////////////////
-  // Layer 1 - Hazelcast Jet Run Job //
-  /////////////////////////////////////
-
-  /**
-   * Needs a JetInstance and Pipeline defined in this factors. Adds the pipeline to the existing
-   * JetInstance as a job.
-   *
-   * @param jobName The name of the job.
-   */
-  public void runUc1Job(final String jobName) {
-
-    // Check if a Jet Instance for UC1 is set.
-    if (this.uc1JetInstance == null) {
-      throw new IllegalStateException("Jet Instance is not set! "
-          + "Cannot start a hazelcast jet job for UC1.");
-    }
-
-    // Check if a Pipeline for UC1 is set.
-    if (this.uc1JetPipeline == null) {
-      throw new IllegalStateException(
-          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC1.");
-    }
-
-    // Adds the job name and joins a job to the JetInstance defined in this factory
-    final JobConfig jobConfig = new JobConfig();
-    jobConfig.setName(jobName);
-    this.uc1JetInstance.newJobIfAbsent(this.uc1JetPipeline, jobConfig).join();
-  }
-
-  /////////////
-  // Layer 2 //
-  /////////////
-
-  /**
-   * Build a Hazelcast JetInstance used to run a job on.
-   *
-   * @param logger The logger specified for this JetInstance.
-   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
-   *        environment.
-   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
-   * @return A Uc1HazelcastJetFactory containing a set JetInstance.
-   */
-  public Uc1HazelcastJetFactory buildUc1JetInstanceFromEnv(final Logger logger,
-      final String bootstrapServerDefault,
-      final String hzKubernetesServiceDnsKey) {
-    this.uc1JetInstance = new JetInstanceBuilder()
-        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
-        .build();
-    return this;
-  }
-
-  /**
-   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
-   * topic and kafka properties defined in this factory beforehand.
-   *
-   * @return A Uc1HazelcastJetFactory containg a set pipeline.
-   */
-  public Uc1HazelcastJetFactory buildUc1Pipeline() {
-
-    // Check if Properties for the Kafka Input are set.
-    if (this.kafkaPropertiesForPipeline == null) {
-      throw new IllegalStateException(
-          "Kafka Properties for pipeline not set! Cannot build pipeline.");
-    }
-
-    // Check if the Kafka input topic is set.
-    if (this.kafkaInputTopic == null) {
-      throw new IllegalStateException("Kafka input topic for pipeline not set! "
-          + "Cannot build pipeline.");
-    }
-
-    // Build Pipeline Using the pipelineBuilder
-    final Uc1PipelineBuilder pipeBuilder = new Uc1PipelineBuilder();
-    this.uc1JetPipeline =
-        pipeBuilder.build(this.kafkaPropertiesForPipeline, this.kafkaInputTopic);
-    // Return Uc1HazelcastJetBuilder factory
-    return this;
-  }
-
-  /////////////
-  // Layer 3 //
-  /////////////
-
-  /**
-   * Sets kafka properties for pipeline used in this builder.
-   *
-   * @param kafkaProperties A propeties object containing necessary values used for the hazelcst jet
-   *        kafka connection.
-   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
-   */
-  public Uc1HazelcastJetFactory setCustomProperties(final Properties kafkaProperties) { // NOPMD
-    this.kafkaPropertiesForPipeline = kafkaProperties;
-    return this;
-  }
-
-  /**
-   * Sets kafka properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
-   *        url can be fetched from the environment.
-   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
-   */
-  public Uc1HazelcastJetFactory setPropertiesFromEnv(final String bootstrapServersDefault, // NOPMD
-      final String schemaRegistryUrlDefault,
-      final String jobName) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            jobName,
-            StringDeserializer.class.getCanonicalName(),
-            KafkaAvroDeserializer.class.getCanonicalName());
-    this.kafkaPropertiesForPipeline = kafkaProps;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder.
-   *
-   * @param inputTopic The kafka topic used as the pipeline input.
-   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc1HazelcastJetFactory setCustomKafkaInputTopic(final String inputTopic) { // NOPMD
-    this.kafkaInputTopic = inputTopic;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc1HazelcastJetFactory setKafkaInputTopicFromEnv(final String defaultInputTopic) { // NOPMD
-    this.kafkaInputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
-        defaultInputTopic);
-    return this;
-  }
-
-
-
-}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineFactory.java
similarity index 65%
rename from theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
rename to theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineFactory.java
index e6107682ef3aff91d45bcd7a65675b5a6323975e..a373537b3358a396071d6542ce4aaaf4b3d25c3f 100644
--- a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineFactory.java
@@ -6,44 +6,49 @@ import com.hazelcast.jet.pipeline.Sink;
 import com.hazelcast.jet.pipeline.SinkBuilder;
 import com.hazelcast.jet.pipeline.StreamSource;
 import com.hazelcast.jet.pipeline.StreamStage;
-import java.util.Map.Entry;
+import java.util.Map;
 import java.util.Properties;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.PipelineFactory;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
 import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
 import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
 
-
 /**
- * Builder to build a HazelcastJet Pipeline for UC1 which can be used for stream processing using
- * Hazelcast Jet.
+ * PipelineFactory for use case 1.
  */
-public class Uc1PipelineBuilder {
+public class Uc1PipelineFactory extends PipelineFactory {
 
   private final DatabaseAdapter<String> databaseAdapter = LogWriterFactory.forJson();
 
   /**
-   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   * Creates a new Uc1PipelineFactory.
    *
-   * @param kafkaPropsForPipeline Properties object containing the necessary Kafka attributes.
+   * @param kafkaReadPropsForPipeline Properties object containing the necessary Kafka attributes.
    * @param kafkaInputTopic The name of the input topic used for the pipeline.
-   * @return A Hazelcast Jet pipeline which processes data for Uc1.
    */
-  public Pipeline build(final Properties kafkaPropsForPipeline, final String kafkaInputTopic) {
+  public Uc1PipelineFactory(final Properties kafkaReadPropsForPipeline,
+      final String kafkaInputTopic) {
+    super(kafkaReadPropsForPipeline, kafkaInputTopic);
+  }
 
-    // Define a new pipeline
-    final Pipeline pipe = Pipeline.create();
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @return A Hazelcast Jet pipeline which processes data for Uc1.
+   */
+  @Override
+  public Pipeline buildPipeline() {
 
     // Define the Kafka Source
-    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
-        KafkaSources.<String, ActivePowerRecord>kafka(kafkaPropsForPipeline, kafkaInputTopic);
+    final StreamSource<Map.Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(this.kafkaReadPropsForPipeline,
+            this.kafkaInputTopic);
 
     // Extend UC1 topology to the pipeline
-    final StreamStage<String> uc1TopologyProduct = this.extendUc1Topology(pipe, kafkaSource);
+    final StreamStage<String> uc1TopologyProduct = this.extendUc1Topology(kafkaSource);
 
     // Add Sink: Logger
-    // Do not refactor this to just use the call
-    // (There is a problem with static calls in functions in hazelcastjet)
     final DatabaseWriter<String> writer = this.databaseAdapter.getDatabaseWriter();
     final Sink<String> sink = SinkBuilder.sinkBuilder(
         "Sink into database", x -> writer)
@@ -52,7 +57,7 @@ public class Uc1PipelineBuilder {
 
     uc1TopologyProduct.writeTo(sink);
 
-    return pipe;
+    return this.pipe;
   }
 
   /**
@@ -63,20 +68,19 @@ public class Uc1PipelineBuilder {
    * using GSON.
    * </p>
    *
-   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
    * @param source A streaming source to fetch data from.
    * @return A {@code StreamStage<String>} with the above definition of the String. It can be used
    *         to be further modified or directly be written into a sink.
    */
-  public StreamStage<String> extendUc1Topology(final Pipeline pipe,
-      final StreamSource<Entry<String, ActivePowerRecord>> source) {
+  public StreamStage<String> extendUc1Topology(
+      final StreamSource<Map.Entry<String, ActivePowerRecord>> source) {
 
     // Build the pipeline topology
-    return pipe.readFrom(source)
+    return this.pipe.readFrom(source)
         .withNativeTimestamps(0)
-        .setLocalParallelism(1)
+        // .setLocalParallelism(1)
         .setName("Convert content")
-        .map(Entry::getValue)
+        .map(Map.Entry::getValue)
         .map(this.databaseAdapter.getRecordConverter()::convert);
   }
 }
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
index 8ffde0487ab88c260655f51eac3d2701f31a7ab0..3cb30bf239d8035ce3813063ee6089ec7440c441 100644
--- a/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
@@ -17,6 +17,7 @@ import com.hazelcast.jet.test.SerialTest;
 import com.hazelcast.logging.ILogger;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.concurrent.CompletionException;
 import org.junit.After;
 import org.junit.Assert;
@@ -24,11 +25,12 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
 import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
 import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
-import rocks.theodolite.benchmarks.uc1.hazelcastjet.Uc1PipelineBuilder;
+import rocks.theodolite.benchmarks.uc1.hazelcastjet.Uc1PipelineFactory;
 
 /**
  * Test methods for the Hazelcast Jet Implementation of UC1.
@@ -40,8 +42,8 @@ public class Uc1PipelineTest extends JetTestSupport {
   private Pipeline testPipeline = null;
   private StreamStage<String> uc1Topology = null;
 
-  // Standart Logger
-  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(Uc1PipelineTest.class);
+  // Standard Logger
+  private static final Logger LOGGER = LoggerFactory.getLogger(Uc1PipelineTest.class);
   // HazelcastJet Logger
   private static final ILogger logger = getLogger(Uc1PipelineTest.class);
 
@@ -82,10 +84,10 @@ public class Uc1PipelineTest extends JetTestSupport {
         });
 
     // Create pipeline to test
-    final Uc1PipelineBuilder pipelineBuilder = new Uc1PipelineBuilder();
-    this.testPipeline = Pipeline.create();
-    this.uc1Topology =
-        pipelineBuilder.extendUc1Topology(this.testPipeline, testSource);
+    final Properties properties = new Properties();
+    final Uc1PipelineFactory factory = new Uc1PipelineFactory(properties, "");
+    this.uc1Topology = factory.extendUc1Topology(testSource);
+    this.testPipeline = factory.getPipe();
 
     // Create DatabaseWriter sink
     final DatabaseWriter<String> adapter = this.databaseAdapter.getDatabaseWriter();
diff --git a/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties
index e3371cc87e20e85e6e8c327955537e6e49dab86e..9bb191b3eaa56bab474e2926565449884d76badd 100644
--- a/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties
@@ -6,3 +6,5 @@ kafka.input.topic=input
 
 schema.registry.url=http://localhost:8081
 
+# Kafka Streams Config
+commit.interval.ms=5000
diff --git a/theodolite-benchmarks/uc2-beam-samza/Dockerfile b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
index ae762791c40fc6981ce7e5fd08bea860ed9208ec..f873ec8cba111e70dfdb29ae20dc4fbb2e7a9ecc 100644
--- a/theodolite-benchmarks/uc2-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
@@ -1,8 +1,9 @@
 FROM openjdk:11-slim
 
 ENV MAX_SOURCE_PARALLELISM=1024
+ENV ENABLE_METRICS=true
 
 ADD build/distributions/uc2-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc2-beam-samza/bin/uc2-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc2-beam-samza/bin/uc2-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=$ENABLE_METRICS --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
index decbcae1c4b524f9f39295ecd49275a3c1b09951..d8bf1ba526693d32c4c15ccdfb112351ce7e1c0e 100644
--- a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
@@ -40,10 +40,10 @@ public class PipelineFactory extends AbstractPipelineFactory {
 
   @Override
   protected void constructPipeline(final Pipeline pipeline) {
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final String outputTopic = this.config.getString(Uc2ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
 
-    final Duration duration = Duration.standardMinutes(
-        this.config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES));
+    final Duration downsampleInterval = Duration.standardMinutes(
+        this.config.getInt(Uc2ConfigurationKeys.DOWNSAMPLE_INTERVAL_MINUTES));
 
     final KafkaActivePowerTimestampReader kafkaReader = super.buildKafkaReader();
 
@@ -58,7 +58,7 @@ public class PipelineFactory extends AbstractPipelineFactory {
     // Apply pipeline transformations
     pipeline.apply(kafkaReader)
         // Apply a fixed window
-        .apply(Window.<KV<String, ActivePowerRecord>>into(FixedWindows.of(duration)))
+        .apply(Window.<KV<String, ActivePowerRecord>>into(FixedWindows.of(downsampleInterval)))
         // Aggregate per window for every key
         .apply(Combine.<String, ActivePowerRecord, Stats>perKey(new StatsAggregation()))
         .setCoder(KvCoder.of(StringUtf8Coder.of(), SerializableCoder.of(Stats.class)))
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/StatsAggregation.java b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/StatsAggregation.java
index cf320bf18b37f25b787c1baea1109892f2aa83fa..491b8b05b95695006835442608d045c9a76e3222 100644
--- a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/StatsAggregation.java
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/StatsAggregation.java
@@ -31,7 +31,7 @@ public class StatsAggregation extends CombineFn<ActivePowerRecord, StatsAccumula
 
   @Override
   public StatsAccumulator mergeAccumulators(final Iterable<StatsAccumulator> accums) {
-    final StatsAccumulator merged = createAccumulator();
+    final StatsAccumulator merged = this.createAccumulator();
     for (final StatsAccumulator accum : accums) {
       merged.addAll(accum.snapshot());
     }
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/Uc2ConfigurationKeys.java b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/Uc2ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..f466f31aa5a0f1c36b6a0a1fac90d3e739bf2c9d
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/Uc2ConfigurationKeys.java
@@ -0,0 +1,14 @@
+package rocks.theodolite.benchmarks.uc2.beam;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc2ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String DOWNSAMPLE_INTERVAL_MINUTES = "downsample.interval.minutes";
+
+  private Uc2ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc2-beam/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-beam/src/main/resources/META-INF/application.properties
index c6672125a8b6a074cb7eca31bd90700cd4da736a..3f81e6005be8e83893f4c7b51f91554493505758 100644
--- a/theodolite-benchmarks/uc2-beam/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc2-beam/src/main/resources/META-INF/application.properties
@@ -4,15 +4,13 @@ application.version=0.0.1
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
-kafka.window.duration.minutes=1
-
 schema.registry.url=http://localhost:8081
 
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
+downsample.interval.minutes=1
 
 specific.avro.reader=true
+
+# Kafka Settings
 enable.auto.commit=true
 max.poll.records=500
-auto.offset.reset=earliest
\ No newline at end of file
+auto.offset.reset=earliest
diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/ConfigurationKeys.java b/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/ConfigurationKeys.java
deleted file mode 100644
index bcb15b7d655d9a05b0b65d4dda480379173a8212..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/ConfigurationKeys.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package rocks.theodolite.benchmarks.uc2.flink;
-
-/**
- * Keys to access configuration parameters.
- */
-public final class ConfigurationKeys {
-
-  public static final String APPLICATION_NAME = "application.name";
-
-  public static final String APPLICATION_VERSION = "application.version";
-
-  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
-
-  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
-
-  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
-
-  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
-
-  public static final String COMMIT_INTERVAL_MS = "commit.interval.ms";
-
-  public static final String KAFKA_WINDOW_DURATION_MINUTES = "kafka.window.duration.minutes";
-
-  public static final String FLINK_STATE_BACKEND = "flink.state.backend";
-
-  public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path";
-
-  public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD
-      "flink.state.backend.memory.size";
-
-  public static final String CHECKPOINTING = "checkpointing";
-
-  public static final String PARALLELISM = "parallelism";
-
-  private ConfigurationKeys() {}
-
-}
diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/HistoryServiceFlinkJob.java
index 5a34d17a89186630afb0917e16940210b84fd5e8..d349d4086c7cf597a18fb681a1755f10a40b551a 100644
--- a/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/HistoryServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/HistoryServiceFlinkJob.java
@@ -11,6 +11,7 @@ import org.apache.kafka.common.serialization.Serdes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.flink.AbstractFlinkService;
+import rocks.theodolite.benchmarks.commons.flink.ConfigurationKeys;
 import rocks.theodolite.benchmarks.commons.flink.KafkaConnectorFactory;
 import rocks.theodolite.benchmarks.commons.flink.serialization.StatsSerializer;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
@@ -35,13 +36,12 @@ public final class HistoryServiceFlinkJob extends AbstractFlinkService {
 
   @Override
   protected void buildPipeline() {
-    final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+    final String kafkaBroker = this.config.getString(Uc2ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
     final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL);
     final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
-    final int windowDurationMinutes =
-        this.config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES);
-    final Time windowDuration = Time.minutes(windowDurationMinutes);
+    final String outputTopic = this.config.getString(Uc2ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final Time windowDuration = Time.minutes(
+        this.config.getInt(Uc2ConfigurationKeys.DOWNSAMPLE_INTERVAL_MINUTES));
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
 
     final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory(
@@ -65,7 +65,7 @@ public final class HistoryServiceFlinkJob extends AbstractFlinkService {
         .map(t -> {
           final String key = t.f0;
           final String value = t.f1.toString();
-          LOGGER.info("{}: {}", key, value);
+          // LOGGER.info("{}: {}", key, value);
           return new Tuple2<>(key, value);
         }).name("map").returns(Types.TUPLE(Types.STRING, Types.STRING))
         .addSink(kafkaSink).name("[Kafka Producer] Topic: " + outputTopic);
diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/Uc2ConfigurationKeys.java b/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/Uc2ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..06cc33908a501770bd91907923e8a3536ff84af4
--- /dev/null
+++ b/theodolite-benchmarks/uc2-flink/src/main/java/rocks/theodolite/benchmarks/uc2/flink/Uc2ConfigurationKeys.java
@@ -0,0 +1,16 @@
+package rocks.theodolite.benchmarks.uc2.flink;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc2ConfigurationKeys {
+
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String DOWNSAMPLE_INTERVAL_MINUTES = "downsample.interval.minutes";
+
+  private Uc2ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties
index f971390984ee41be1fce54e62f4f43ee2b9c02da..f12c875e230a3c2871097e7256948dc90d75edf4 100644
--- a/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties
@@ -5,7 +5,8 @@ kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
 schema.registry.url=http://localhost:8081
-num.threads=1
-commit.interval.ms=100
-cache.max.bytes.buffering=-1
-kafka.window.duration.minutes=1
\ No newline at end of file
+
+downsample.interval.minutes=1
+
+# Flink configuration
+checkpointing.interval.ms=1000
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
index 7737959bde97ce8332c87fc88b0aa9fd90bf8250..9025ae35a15e17ae7501547777bfd63d2dbcb5a3 100644
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
@@ -1,70 +1,59 @@
 package rocks.theodolite.benchmarks.uc2.hazelcastjet;
 
+import com.google.common.math.StatsAccumulator;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.time.Duration;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.HazelcastJetService;
+
 
 /**
- * A microservice that manages the history and, therefore, stores and aggregates incoming
- * measurements.
+ * A microservice that aggregates incoming messages in a tumbling window.
  */
-public class HistoryService {
+public class HistoryService extends HazelcastJetService {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
 
-  // Hazelcast settings (default)
-  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
-  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
-
-  // Kafka settings (default)
-  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
-  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
-  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
-  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
-
-  // UC2 specific (default)
-  private static final String DOWNSAMPLE_INTERVAL_DEFAULT_MS = "60000";
-
-  // Job name (default)
-  private static final String JOB_NAME = "uc2-hazelcastjet";
-
   /**
-   * Entrypoint for UC2 using Gradle Run.
+   * Constructs the use case logic for UC2. Retrieves the needed values and instantiates a pipeline
+   * factory.
    */
-  public static void main(final String[] args) {
-    final HistoryService uc2HistoryService = new HistoryService();
-    try {
-      uc2HistoryService.run();
-    } catch (final Exception e) { // NOPMD
-      LOGGER.error("ABORT MISSION!: {}", e);
-    }
+  public HistoryService() {
+    super(LOGGER);
+    final Properties kafkaProps =
+        this.propsBuilder.buildReadProperties(
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    final Properties kafkaWriteProps =
+        this.propsBuilder.buildWriteProperties(
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
+
+    final String kafkaOutputTopic = this.config.getString(Uc2ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+
+    final Duration downsampleInterval = Duration.ofMinutes(
+        this.config.getInt(Uc2ConfigurationKeys.DOWNSAMPLE_INTERVAL_MINUTES));
+
+    this.pipelineFactory = new Uc2PipelineFactory(
+        kafkaProps,
+        this.kafkaInputTopic,
+        kafkaWriteProps,
+        kafkaOutputTopic,
+        downsampleInterval);
   }
 
-  /**
-   * Start a UC2 service.
-   *
-   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  public void run() throws Exception { // NOPMD
-    this.createHazelcastJetApplication();
+  @Override
+  protected void registerSerializer() {
+    this.jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
   }
 
-  /**
-   * Creates a Hazelcast Jet Application for UC2 using the Uc1HazelcastJetFactory.
-   *
-   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  private void createHazelcastJetApplication() throws Exception { // NOPMD
-    new Uc2HazelcastJetFactory()
-        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
-        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
-        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
-        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
-        .setDownsampleIntervalFromEnv(DOWNSAMPLE_INTERVAL_DEFAULT_MS)
-        .buildUc2Pipeline()
-        .buildUc2JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
-        .runUc2Job(JOB_NAME);
-  }
 
+  public static void main(final String[] args) {
+    new HistoryService().run();
+  }
 }
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSerializer.java
similarity index 84%
rename from theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
rename to theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSerializer.java
index 5c22b8dd6cc1a7af995a98b4388f40a1a3867ba5..d2fec1b131ad4092d72a14b773a16508c3209ad4 100644
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSerializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
 
 import com.google.common.math.Stats;
 import com.google.common.math.StatsAccumulator;
@@ -8,8 +8,7 @@ import com.hazelcast.nio.serialization.StreamSerializer;
 import java.io.IOException;
 
 /**
- * A serializer and deserializer for the StatsAccumulator which is used in the UC2 implementation
- * using Hazelcast Jet.
+ * A serializer and deserializer for the {@link StatsAccumulator}.
  */
 public class StatsAccumulatorSerializer implements StreamSerializer<StatsAccumulator> {
 
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSupplier.java
similarity index 63%
rename from theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
rename to theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSupplier.java
index f4d203f03185cda712a5280634d8d3858c02f30d..401154249be3d345bec3696a6c158c863fa97954 100644
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAccumulatorSupplier.java
@@ -1,18 +1,17 @@
-package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
 
 import com.google.common.math.StatsAccumulator;
 import com.hazelcast.function.SupplierEx;
 
 /**
- * Supplies a StatsAccumulator. Is used in the aggregation operation of the Hazelcast Jet
- * implementation for UC2.
+ * Supplies a {@link StatsAccumulator}.
  */
 public class StatsAccumulatorSupplier implements SupplierEx<StatsAccumulator> {
 
   private static final long serialVersionUID = -656395626316842910L; // NOPMD
 
   /**
-   * Gets a StatsAccumulator.
+   * Gets a {@link StatsAccumulator}.
    */
   @Override
   public StatsAccumulator getEx() throws Exception {
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAggregatorFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAggregatorFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..dd52b12be72119b8d04707a984e0c5d921845de6
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/StatsAggregatorFactory.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import java.util.Map.Entry;
+import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
+
+
+/**
+ * Factory for creating an aggregation operator for {@link Stats} objects.
+ */
+public final class StatsAggregatorFactory {
+
+  private StatsAggregatorFactory() {}
+
+  /**
+   * Defines an AggregateOperation1 for Hazelcast Jet which is used in the Pipeline of the Hazelcast
+   * Jet implementation of UC2.
+   *
+   * <p>
+ * Takes windowed and keyed {@code Entry<String,ActivePowerRecord>} elements and returns a
+   * {@link Stats} object.
+   * </p>
+   *
+   * @return An AggregateOperation used by Hazelcast Jet in a streaming stage which aggregates
+   *         ActivePowerRecord Objects into Stats Objects.
+   */
+  public static AggregateOperation1<Entry<?, ActivePowerRecord>, StatsAccumulator, Stats> // NOCS
+      create() {
+    // Aggregate Operation to Create a Stats Object from Entry<?,ActivePowerRecord> items using
+    // the StatsAccumulator.
+    return AggregateOperation
+        // Creates the accumulator
+        .withCreate(new StatsAccumulatorSupplier())
+        // Defines the accumulation
+        .<Entry<?, ActivePowerRecord>>andAccumulate((accumulator, item) -> {
+          accumulator.add(item.getValue().getValueInW());
+        })
+        // Defines the combination of spread out instances
+        .andCombine((left, right) -> {
+          final Stats rightStats = right.snapshot();
+          left.addAll(rightStats);
+
+        })
+        // Finishes the aggregation
+        .andExportFinish(StatsAccumulator::snapshot);
+  }
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2ConfigurationKeys.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..16f1c0c048bbac1f5e23aaba1f55b6d54edd80fd
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2ConfigurationKeys.java
@@ -0,0 +1,12 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+/**
+ * Configuration Keys used for Hazelcast Jet Benchmark implementations.
+ */
+public class Uc2ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String DOWNSAMPLE_INTERVAL_MINUTES = "downsample.interval.minutes";
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
deleted file mode 100644
index 92029a78405deacae5e7ad352b184eb852cd842e..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
+++ /dev/null
@@ -1,301 +0,0 @@
-package rocks.theodolite.benchmarks.uc2.hazelcastjet;
-
-import com.google.common.math.StatsAccumulator;
-import com.hazelcast.jet.JetInstance;
-import com.hazelcast.jet.config.JobConfig;
-import com.hazelcast.jet.pipeline.Pipeline;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import java.util.Objects;
-import java.util.Properties;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.slf4j.Logger;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
-import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSerializer;
-
-/**
- * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC2
- * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
- * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
- * the Read and Write Properties, set the input and output topic, and set the downsample interval
- * which can be done using internal functions of this factory. Outside data only refers to custom
- * values or default values in case data of the environment cannot the fetched.
- */
-public class Uc2HazelcastJetFactory {
-
-  // Information per History Service
-  private Properties kafkaReadPropsForPipeline;
-  private Properties kafkaWritePropsForPipeline;
-  private String kafkaInputTopic;
-  private String kafkaOutputTopic;
-  private JetInstance uc2JetInstance;
-  private Pipeline uc2JetPipeline;
-  // UC2 specific
-  private int downsampleInterval;
-
-  /////////////////////////////////////
-  // Layer 1 - Hazelcast Jet Run Job //
-  /////////////////////////////////////
-
-  /**
-   * Needs a JetInstance and Pipeline defined in this factors. Adds the pipeline to the existing
-   * JetInstance as a job.
-   *
-   * @param jobName The name of the job.
-   */
-  public void runUc2Job(final String jobName) {
-
-    // Check if a Jet Instance for UC2 is set.
-    if (this.uc2JetInstance == null) {
-      throw new IllegalStateException("Jet Instance is not set! "
-          + "Cannot start a hazelcast jet job for UC2.");
-    }
-
-    // Check if a Pipeline for UC2 is set.
-    if (this.uc2JetPipeline == null) {
-      throw new IllegalStateException(
-          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC2.");
-    }
-
-    // Adds the job name and joins a job to the JetInstance defined in this factory
-    final JobConfig jobConfig = new JobConfig();
-    jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
-    jobConfig.setName(jobName);
-    this.uc2JetInstance.newJobIfAbsent(this.uc2JetPipeline, jobConfig).join();
-  }
-
-  /////////////
-  // Layer 2 //
-  /////////////
-
-  /**
-   * Build a Hazelcast JetInstance used to run a job on.
-   *
-   * @param logger The logger specified for this JetInstance.
-   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
-   *        environment.
-   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
-   * @return A Uc2HazelcastJetFactory containing a set JetInstance.
-   */
-  public Uc2HazelcastJetFactory buildUc2JetInstanceFromEnv(final Logger logger,
-      final String bootstrapServerDefault,
-      final String hzKubernetesServiceDnsKey) {
-    this.uc2JetInstance = new JetInstanceBuilder()
-        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
-        .build();
-    return this;
-  }
-
-  /**
-   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
-   * topic and kafka properties defined in this factory beforehand.
-   *
-   * @return A Uc2HazelcastJetFactory containg a set pipeline.
-   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
-   *         cannot be built.
-   */
-  public Uc2HazelcastJetFactory buildUc2Pipeline() throws IllegalStateException { // NOPMD
-
-    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
-
-    // Check if Properties for the Kafka Input are set.
-    if (this.kafkaReadPropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if Properties for the Kafka Output are set.
-    if (this.kafkaWritePropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka input topic is set.
-    if (this.kafkaInputTopic == null) {
-      throw new IllegalStateException("Kafka input topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka output topic is set.
-    if (this.kafkaOutputTopic == null) {
-      throw new IllegalStateException("kafka output topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the downsampleInterval (tumbling window time) is set.
-    if (this.downsampleInterval <= 0) {
-      throw new IllegalStateException(
-          "downsample interval for pipeline not set or not bigger than 0! "
-              + defaultPipelineWarning);
-    }
-
-    // Build Pipeline Using the pipelineBuilder
-    final Uc2PipelineBuilder pipeBuilder = new Uc2PipelineBuilder();
-    this.uc2JetPipeline =
-        pipeBuilder.build(this.kafkaReadPropsForPipeline, this.kafkaWritePropsForPipeline,
-            this.kafkaInputTopic, this.kafkaOutputTopic, this.downsampleInterval);
-    // Return Uc2HazelcastJetBuilder factory
-    return this;
-  }
-
-  /////////////
-  // Layer 3 //
-  /////////////
-
-  /**
-   * Sets kafka read properties for pipeline used in this builder.
-   *
-   * @param kafkaReadProperties A propeties object containing necessary values used for the hazelcst
-   *        jet kafka connection to read data.
-   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
-   */
-  public Uc2HazelcastJetFactory setCustomReadProperties(// NOPMD
-      final Properties kafkaReadProperties) {
-    this.kafkaReadPropsForPipeline = kafkaReadProperties;
-    return this;
-  }
-
-  /**
-   * Sets kafka write properties for pipeline used in this builder.
-   *
-   * @param kafkaWriteProperties A propeties object containing necessary values used for the
-   *        hazelcst jet kafka connection to write data.
-   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
-   */
-  public Uc2HazelcastJetFactory setCustomWriteProperties(// NOPMD
-      final Properties kafkaWriteProperties) {
-    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
-    return this;
-  }
-
-  /**
-   * Sets kafka read properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
-   *        url can be fetched from the environment.
-   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
-   */
-  public Uc2HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault,
-      final String schemaRegistryUrlDefault,
-      final String jobName) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaReadProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            jobName,
-            StringDeserializer.class.getCanonicalName(),
-            KafkaAvroDeserializer.class.getCanonicalName());
-    this.kafkaReadPropsForPipeline = kafkaReadProps;
-    return this;
-  }
-
-  /**
-   * Sets kafka write properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
-   */
-  public Uc2HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaWriteProps =
-        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            StringSerializer.class.getCanonicalName(),
-            StringSerializer.class.getCanonicalName());
-    this.kafkaWritePropsForPipeline = kafkaWriteProps;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder.
-   *
-   * @param inputTopic The kafka topic used as the pipeline input.
-   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc2HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
-      final String inputTopic) {
-    this.kafkaInputTopic = inputTopic;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input output for the pipeline used in this builder.
-   *
-   * @param outputTopic The kafka topic used as the pipeline output.
-   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc2HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
-    this.kafkaOutputTopic = outputTopic;
-    return this;
-  }
-
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc2HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
-      final String defaultInputTopic) {
-    this.kafkaInputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
-        defaultInputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc2HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
-      final String defaultOutputTopic) {
-    this.kafkaOutputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
-        defaultOutputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the downsample interval for the pipeline used in this builder.
-   *
-   * @param downsampleInterval the downsample interval to be used for this pipeline.
-   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
-   */
-  public Uc2HazelcastJetFactory setCustomDownsampleInterval(// NOPMD
-      final int downsampleInterval) {
-    this.downsampleInterval = downsampleInterval;
-    return this;
-  }
-
-  /**
-   * Sets the downsample interval for the pipeline used in this builder from the environment.
-   *
-   * @param defaultDownsampleInterval the default downsample interval to be used for this pipeline
-   *        when none is set in the environment.
-   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
-   */
-  public Uc2HazelcastJetFactory setDownsampleIntervalFromEnv(// NOPMD
-      final String defaultDownsampleInterval) {
-    final String downsampleInterval = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.DOWNSAMPLE_INTERVAL),
-        defaultDownsampleInterval);
-    final int downsampleIntervalNumber = Integer.parseInt(downsampleInterval);
-    this.downsampleInterval = downsampleIntervalNumber;
-    return this;
-  }
-
-}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
deleted file mode 100644
index 92e59d256c5f2d5b43644b2e498ca1f2dbca4202..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
+++ /dev/null
@@ -1,136 +0,0 @@
-package rocks.theodolite.benchmarks.uc2.hazelcastjet;
-
-import com.google.common.math.Stats;
-import com.google.common.math.StatsAccumulator;
-import com.hazelcast.jet.aggregate.AggregateOperation;
-import com.hazelcast.jet.aggregate.AggregateOperation1;
-import com.hazelcast.jet.kafka.KafkaSinks;
-import com.hazelcast.jet.kafka.KafkaSources;
-import com.hazelcast.jet.pipeline.Pipeline;
-import com.hazelcast.jet.pipeline.Sinks;
-import com.hazelcast.jet.pipeline.StreamSource;
-import com.hazelcast.jet.pipeline.StreamStage;
-import com.hazelcast.jet.pipeline.WindowDefinition;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
-import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSupplier;
-
-
-
-/**
- * Builder to build a HazelcastJet Pipeline for UC2 which can be used for stream processing using
- * Hazelcast Jet.
- */
-public class Uc2PipelineBuilder {
-
-  /**
-   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
-   *
-   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
-   *        attributes.
-   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
-   *        attributes.
-   * @param kafkaInputTopic The name of the input topic used for the pipeline.
-   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
-   * @param downsampleIntervalInMs The window length of the tumbling window used in the aggregation
-   *        of this pipeline.
-   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
-   *         for UC2.
-   */
-  public Pipeline build(final Properties kafkaReadPropsForPipeline,
-      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
-      final String kafkaOutputTopic,
-      final int downsampleIntervalInMs) {
-
-    // Define a new pipeline
-    final Pipeline pipe = Pipeline.create();
-
-    // Define the Kafka Source
-    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
-        KafkaSources.<String, ActivePowerRecord>kafka(kafkaReadPropsForPipeline, kafkaInputTopic);
-
-    // Extend UC2 topology to the pipeline
-    final StreamStage<Map.Entry<String, String>> uc2TopologyProduct =
-        this.extendUc2Topology(pipe, kafkaSource, downsampleIntervalInMs);
-
-    // Add Sink1: Logger
-    uc2TopologyProduct.writeTo(Sinks.logger());
-    // Add Sink2: Write back to kafka for the final benchmark
-    uc2TopologyProduct.writeTo(KafkaSinks.<String, String>kafka(
-        kafkaWritePropsForPipeline, kafkaOutputTopic));
-
-    return pipe;
-  }
-
-  /**
-   * Extends to a blank Hazelcast Jet Pipeline the UC2 topology defined by theodolite.
-   *
-   * <p>
-   * UC2 takes {@code ActivePowerRecord} objects, groups them by keys, windows them in a tumbling
-   * window and aggregates them into {@code Stats} objects. The final map returns an
-   * {@code Entry<String,String>} where the key is the key of the group and the String is the
-   * {@code .toString()} representation of the {@code Stats} object.
-   * </p>
-   *
-   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
-   * @param source A streaming source to fetch data from.
-   * @param downsampleIntervalInMs The size of the tumbling window.
-   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
-   *         and value of the Entry object. It can be used to be further modified or directly be
-   *         written into a sink.
-   */
-  public StreamStage<Map.Entry<String, String>> extendUc2Topology(final Pipeline pipe,
-      final StreamSource<Entry<String, ActivePowerRecord>> source,
-      final int downsampleIntervalInMs) {
-    // Build the pipeline topology.
-    return pipe.readFrom(source)
-        .withNativeTimestamps(0)
-        .setLocalParallelism(1)
-        .groupingKey(record -> record.getValue().getIdentifier())
-        .window(WindowDefinition.tumbling(downsampleIntervalInMs))
-        .aggregate(this.uc2AggregateOperation())
-        .map(agg -> {
-          final String theKey = agg.key();
-          final String theValue = agg.getValue().toString();
-          return Map.entry(theKey, theValue);
-        });
-  }
-
-  /**
-   * Defines an AggregateOperation1 for Hazelcast Jet which is used in the Pipeline of the Hazelcast
-   * Jet implementation of UC2.
-   *
-   * <p>
-   * Takes a windowed and keyed {@code Entry<String,ActivePowerRecord>} elements and returns a
-   * {@Stats} object.
-   * </p>
-   *
-   * @return An AggregateOperation used by Hazelcast Jet in a streaming stage which aggregates
-   *         ActivePowerRecord Objects into Stats Objects.
-   */
-  public AggregateOperation1<Entry<String, ActivePowerRecord>, StatsAccumulator, Stats> uc2AggregateOperation() { // NOCS
-    // Aggregate Operation to Create a Stats Object from Entry<String,ActivePowerRecord> items using
-    // the Statsaccumulator.
-    return AggregateOperation
-        // Creates the accumulator
-        .withCreate(new StatsAccumulatorSupplier())
-        // Defines the accumulation
-        .<Entry<String, ActivePowerRecord>>andAccumulate((accumulator, item) -> {
-          accumulator.add(item.getValue().getValueInW());
-        })
-        // Defines the combination of spread out instances
-        .andCombine((left, right) -> {
-          final Stats rightStats = right.snapshot();
-          left.addAll(rightStats);
-
-        })
-        // Finishes the aggregation
-        .andExportFinish(
-            (accumulator) -> {
-              return accumulator.snapshot();
-            });
-  }
-
-}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..83a1a51972b74702800595857a138ad64e98977b
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineFactory.java
@@ -0,0 +1,103 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Duration;
+import java.util.Map;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.PipelineFactory;
+import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
+
+
+/**
+ * PipelineFactory for use case 2. Allows to build and extend a pipeline.
+ */
+public class Uc2PipelineFactory extends PipelineFactory {
+
+  private final Duration downsampleInterval;
+
+  /**
+   * Factory for uc2 pipelines.
+   *
+   * @param kafkaReadPropsForPipeline Properties object containing the necessary Kafka read
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param downsampleIntervalInMs the length of the tumbling window used in the aggregation of
+   *        this pipeline (note: passed as a {@link Duration}, despite the "InMs" name).
+   */
+  protected Uc2PipelineFactory(final Properties kafkaReadPropsForPipeline,
+      final String kafkaInputTopic,
+      final Properties kafkaWritePropsForPipeline,
+      final String kafkaOutputTopic,
+      final Duration downsampleIntervalInMs) {
+    super(kafkaReadPropsForPipeline, kafkaInputTopic,
+        kafkaWritePropsForPipeline, kafkaOutputTopic);
+    this.downsampleInterval = downsampleIntervalInMs;
+  }
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @return a Pipeline which can be used in a Hazelcast Jet instance to process data
+   *         for UC2.
+   */
+  @Override
+  public Pipeline buildPipeline() {
+
+    // Define the Kafka Source
+    final StreamSource<Map.Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(this.kafkaReadPropsForPipeline,
+            this.kafkaInputTopic);
+
+    // Extend UC2 topology to the pipeline
+    final StreamStage<Map.Entry<String, String>> uc2TopologyProduct =
+        this.extendUc2Topology(kafkaSource);
+
+    // Add Sink1: Logger
+    // uc2TopologyProduct.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc2TopologyProduct.writeTo(KafkaSinks.<String, String>kafka(
+        this.kafkaWritePropsForPipeline, this.kafkaOutputTopic));
+
+    return this.pipe;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC2 topology defined by Theodolite.
+   *
+   * <p>
+   * UC2 takes {@code ActivePowerRecord} objects, groups them by keys, windows them in a tumbling
+   * window and aggregates them into {@code Stats} objects. The final map returns an
+   * {@code Entry<String,String>} where the key is the key of the group and the String is the
+   * {@code .toString()} representation of the {@code Stats} object.
+   * </p>
+   *
+   * @param source A streaming source to fetch data from.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc2Topology(
+      final StreamSource<Map.Entry<String, ActivePowerRecord>> source) {
+    // Build the pipeline topology.
+    return this.pipe.readFrom(source)
+        .withNativeTimestamps(0)
+        // .setLocalParallelism(1)
+        .groupingKey(record -> record.getValue().getIdentifier())
+        .window(WindowDefinition.tumbling(this.downsampleInterval.toMillis()))
+        .aggregate(StatsAggregatorFactory.create())
+        .map(agg -> {
+          final String theKey = agg.key();
+          final String theValue = agg.getValue().toString();
+          return Map.entry(theKey, theValue);
+        });
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
index e3371cc87e20e85e6e8c327955537e6e49dab86e..636584ce9c6c1b8b22a8e63252aeda0fae04f1f9 100644
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -1,8 +1,9 @@
-application.name=theodolite-uc1-application
+application.name=theodolite-uc2-application
 application.version=0.0.1
 
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
-
+kafka.output.topic=output
 schema.registry.url=http://localhost:8081
 
+downsample.interval.minutes=1
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
index 0a579c8d4ad2c872f7c90b1d6456de78b3a20f91..8b44bcd5f0451562254bac9a9a50c641702d7d52 100644
--- a/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
@@ -11,8 +11,10 @@ import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
 import com.hazelcast.jet.pipeline.test.Assertions;
 import com.hazelcast.jet.pipeline.test.TestSources;
 import com.hazelcast.jet.test.SerialTest;
+import java.time.Duration;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.concurrent.CompletionException;
 import org.junit.After;
 import org.junit.Assert;
@@ -42,7 +44,7 @@ public class Uc2PipelineTest extends JetTestSupport {
     final int testItemsPerSecond = 1;
     final String testSensorName = "TEST-SENSOR";
     final Double testValueInW = 10.0;
-    final int testWindowInMs = 5000;
+    final Duration testWindow = Duration.ofSeconds(5);
 
     // Create mock jet instance with configuration
     final String testClusterName = randomName();
@@ -61,11 +63,12 @@ public class Uc2PipelineTest extends JetTestSupport {
         });
 
     // Create pipeline to test
-    final Uc2PipelineBuilder pipelineBuilder = new Uc2PipelineBuilder();
-    this.testPipeline = Pipeline.create();
-    this.uc2Topology =
-        pipelineBuilder.extendUc2Topology(this.testPipeline, testSource, testWindowInMs);
+    final Properties properties = new Properties();
+    final Uc2PipelineFactory factory = new Uc2PipelineFactory(
+        properties, "", properties, "", testWindow);
 
+    this.uc2Topology = factory.extendUc2Topology(testSource);
+    this.testPipeline = factory.getPipe();
   }
 
   /**
@@ -81,9 +84,8 @@ public class Uc2PipelineTest extends JetTestSupport {
 
     // Assertion
     this.uc2Topology.apply(Assertions.assertCollectedEventually(timeout,
-        collection -> Assert.assertTrue(
-            "Not the right amount items in Stats Object!",
-            collection.get(collection.size() - 1).getValue().equals(expectedOutput))));
+        collection -> Assert.assertEquals("Not the right amount items in Stats Object!",
+            expectedOutput, collection.get(collection.size() - 1).getValue())));
 
     // Run the test!
     try {
diff --git a/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/HistoryService.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/HistoryService.java
index 4afc2d91eaaf98226f262f072cfd7e5aed6f847e..a6375688d619c9ff65c4fd97bc28702d73284f60 100644
--- a/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/HistoryService.java
+++ b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/HistoryService.java
@@ -1,12 +1,10 @@
 package rocks.theodolite.benchmarks.uc2.kstreams;
 
 import java.time.Duration;
-import java.util.Objects;
 import java.util.concurrent.CompletableFuture;
 import org.apache.commons.configuration2.Configuration;
 import org.apache.kafka.streams.KafkaStreams;
 import rocks.theodolite.benchmarks.commons.commons.configuration.ServiceConfigurations;
-import rocks.theodolite.benchmarks.commons.kstreams.ConfigurationKeys;
 
 /**
  * A microservice that manages the history and, therefore, stores and aggregates incoming
@@ -18,8 +16,6 @@ public class HistoryService {
   private final Configuration config = ServiceConfigurations.createWithDefaults();
 
   private final CompletableFuture<Void> stopEvent = new CompletableFuture<>();
-  private final int windowDurationMinutes = Integer
-      .parseInt(Objects.requireNonNullElse(System.getenv("KAFKA_WINDOW_DURATION_MINUTES"), "60"));
 
   /**
    * Start the service.
@@ -35,8 +31,9 @@ public class HistoryService {
   private void createKafkaStreamsApplication() {
     final Uc2KafkaStreamsBuilder uc2KafkaStreamsBuilder = new Uc2KafkaStreamsBuilder(this.config);
     uc2KafkaStreamsBuilder
-        .outputTopic(this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
-        .windowDuration(Duration.ofMinutes(this.windowDurationMinutes));
+        .outputTopic(this.config.getString(Uc2ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
+        .windowDuration(Duration.ofMinutes(
+            this.config.getInt(Uc2ConfigurationKeys.DOWNSAMPLE_INTERVAL_MINUTES)));
 
     final KafkaStreams kafkaStreams = uc2KafkaStreamsBuilder.build();
 
diff --git a/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/TopologyBuilder.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/TopologyBuilder.java
index cd1d8cd92149d368a27452fa7689f5549a9c2bc7..6aa342f03364fb7f20d404796ba25165caf6a869 100644
--- a/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/TopologyBuilder.java
+++ b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/TopologyBuilder.java
@@ -11,8 +11,6 @@ import org.apache.kafka.streams.kstream.Consumed;
 import org.apache.kafka.streams.kstream.Materialized;
 import org.apache.kafka.streams.kstream.Produced;
 import org.apache.kafka.streams.kstream.TimeWindows;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.kafka.avro.SchemaRegistryAvroSerdeFactory;
 import rocks.theodolite.benchmarks.commons.kstreams.GenericSerde;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
@@ -23,8 +21,6 @@ import rocks.theodolite.benchmarks.uc2.kstreams.util.StatsFactory;
  */
 public class TopologyBuilder {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(TopologyBuilder.class);
-
   private final String inputTopic;
   private final String outputTopic;
   private final SchemaRegistryAvroSerdeFactory srAvroSerdeFactory;
@@ -54,10 +50,6 @@ public class TopologyBuilder {
                 this.srAvroSerdeFactory.<ActivePowerRecord>forValues()))
         .groupByKey()
         .windowedBy(TimeWindows.ofSizeWithNoGrace(this.duration))
-        // .aggregate(
-        // () -> 0.0,
-        // (key, activePowerRecord, agg) -> agg + activePowerRecord.getValueInW(),
-        // Materialized.with(Serdes.String(), Serdes.Double()))
         .aggregate(
             () -> Stats.of(),
             (k, record, stats) -> StatsFactory.accumulate(stats, record.getValueInW()),
@@ -66,7 +58,7 @@ public class TopologyBuilder {
                 GenericSerde.from(Stats::toByteArray, Stats::fromByteArray)))
         .toStream()
         .map((k, s) -> KeyValue.pair(k.key(), s.toString()))
-        .peek((k, v) -> LOGGER.info(k + ": " + v))
+        // .peek((k, v) -> LOGGER.info(k + ": " + v))
         .to(this.outputTopic, Produced.with(Serdes.String(), Serdes.String()));
 
     return this.builder.build(properties);
diff --git a/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/Uc2ConfigurationKeys.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/Uc2ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..1de4827b4b1911e1c883e9503d086aa2d5909cfa
--- /dev/null
+++ b/theodolite-benchmarks/uc2-kstreams/src/main/java/rocks/theodolite/benchmarks/uc2/kstreams/Uc2ConfigurationKeys.java
@@ -0,0 +1,14 @@
+package rocks.theodolite.benchmarks.uc2.kstreams;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc2ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String DOWNSAMPLE_INTERVAL_MINUTES = "downsample.interval.minutes";
+
+  private Uc2ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties
index 1b59528db59653d8dc0c2a04d242a0cd39fe07da..afba990c187e16e9fd98b310b55e345e9ea78864 100644
--- a/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties
@@ -4,6 +4,9 @@ application.version=0.0.1
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
-kafka.window.duration.minutes=1
-
 schema.registry.url=http://localhost:8081
+
+downsample.interval.minutes=1
+
+# Kafka Streams Config
+commit.interval.ms=5000
diff --git a/theodolite-benchmarks/uc3-beam-samza/Dockerfile b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
index 54979b8e1fa8aa9ac7d073302301bd10cbff5f34..02ac459e5197fc0d66bb0d70840a88d32d8084c3 100644
--- a/theodolite-benchmarks/uc3-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
@@ -1,8 +1,9 @@
 FROM openjdk:11-slim
 
 ENV MAX_SOURCE_PARALLELISM=1024
+ENV ENABLE_METRICS=true
 
 ADD build/distributions/uc3-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc3-beam-samza/bin/uc3-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc3-beam-samza/bin/uc3-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=$ENABLE_METRICS --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/MapTimeFormat.java b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/MapTimeFormat.java
index 0cad038141bd7b30d765520403529e9184bbcb86..d6429d758e7eba67f6d4ecd0335067c840a42b2e 100644
--- a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/MapTimeFormat.java
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/MapTimeFormat.java
@@ -8,7 +8,7 @@ import org.apache.beam.sdk.values.KV;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 
 /**
- * Changes the time format to us Europe/Paris time.
+ * Maps the key of incoming records from pure sensor ID strings to {@link HourOfDayKey}s.
  */
 public class MapTimeFormat
     extends SimpleFunction<KV<String, ActivePowerRecord>, KV<HourOfDayKey, ActivePowerRecord>> {
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
index f6587be4c4660a2e34f34efdaa417a7080073d0e..d154b7487b398dcd40f13a1322b7f242053c18a7 100644
--- a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
@@ -41,14 +41,14 @@ public class PipelineFactory extends AbstractPipelineFactory {
 
   @Override
   protected void constructPipeline(final Pipeline pipeline) {
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final String outputTopic = this.config.getString(Uc3ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
 
     final Duration duration =
-        Duration.standardDays(this.config.getInt(ConfigurationKeys.AGGREGATION_DURATION_DAYS));
+        Duration.standardDays(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_DURATION_DAYS));
     final Duration aggregationAdvanceDuration =
-        Duration.standardDays(this.config.getInt(ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
+        Duration.standardDays(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
     final Duration triggerDelay =
-        Duration.standardSeconds(this.config.getInt(ConfigurationKeys.TRIGGER_INTERVAL));
+        Duration.standardSeconds(this.config.getInt(Uc3ConfigurationKeys.TRIGGER_INTERVAL_SECONDS));
 
     // Read from Kafka
     final KafkaActivePowerTimestampReader kafkaReader = super.buildKafkaReader();
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/StatsAggregation.java b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/StatsAggregation.java
index 4fca536baf6db33e57700263cefb837ca8eb5b8b..0c3822fc52841a28e27ef9e7acb548d66c301a76 100644
--- a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/StatsAggregation.java
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/StatsAggregation.java
@@ -8,7 +8,6 @@ import org.apache.beam.sdk.coders.DefaultCoder;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 
-
 /**
  * Aggregation Class for ActivePowerRecords. Creates a StatsAccumulator based on the ValueInW.
  */
@@ -16,6 +15,7 @@ import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 @DefaultCoder(AvroCoder.class)
 public class StatsAggregation extends CombineFn<ActivePowerRecord, StatsAccumulator, Stats>
     implements Serializable {
+
   private static final long serialVersionUID = 1L;
 
   @Override
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/Uc3ConfigurationKeys.java b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/Uc3ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..09bc8a26b6261400f7d341d53146e2435aee3507
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/Uc3ConfigurationKeys.java
@@ -0,0 +1,18 @@
+package rocks.theodolite.benchmarks.uc3.beam;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc3ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
+
+  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
+
+  public static final String TRIGGER_INTERVAL_SECONDS = "aggregation.trigger.interval.seconds";
+
+  private Uc3ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-beam/src/main/resources/META-INF/application.properties
index 0fe4b240d97f087f00c28430740488f7e01f1577..54af67893c54bda1a23b7fd428e7c0f9931a1aa6 100644
--- a/theodolite-benchmarks/uc3-beam/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc3-beam/src/main/resources/META-INF/application.properties
@@ -4,20 +4,16 @@ application.version=0.0.1
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
-kafka.window.duration.minutes=1
 
 schema.registry.url=http://localhost:8081
 
 aggregation.duration.days=30
 aggregation.advance.days=1
-
-trigger.interval=15
-
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
+aggregation.trigger.interval.seconds=15
 
 specific.avro.reader=true
+
+# Kafka Settings
 enable.auto.commit=true
 max.poll.records=500
-auto.offset.reset=earliest
\ No newline at end of file
+auto.offset.reset=earliest
diff --git a/theodolite-benchmarks/uc3-flink/Dockerfile b/theodolite-benchmarks/uc3-flink/Dockerfile
index cef05c0296f55f0cf7391dd35dd1806ec0efa287..744ad389e15a093c8eb1e1ce7ae7352f69c30c33 100644
--- a/theodolite-benchmarks/uc3-flink/Dockerfile
+++ b/theodolite-benchmarks/uc3-flink/Dockerfile
@@ -1,3 +1,3 @@
 FROM flink:1.13-java11
 
-ADD build/libs/uc3-flink-all.jar /opt/flink/usrlib/artifacts/uc3-flink-all.jar
\ No newline at end of file
+ADD build/libs/uc3-flink-all.jar /opt/flink/usrlib/artifacts/uc3-flink-all.jar
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/ConfigurationKeys.java b/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/ConfigurationKeys.java
deleted file mode 100644
index 980f07b9b1478bd2c5fa74c89d1aaff4c10f60df..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/ConfigurationKeys.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package rocks.theodolite.benchmarks.uc3.flink;
-
-/**
- * Keys to access configuration parameters.
- */
-public final class ConfigurationKeys {
-
-  public static final String APPLICATION_NAME = "application.name";
-
-  public static final String APPLICATION_VERSION = "application.version";
-
-  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
-
-  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
-
-  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
-
-  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
-
-  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
-
-  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
-
-  public static final String COMMIT_INTERVAL_MS = "commit.interval.ms";
-
-  public static final String TIME_ZONE = "time.zone";
-
-  public static final String FLINK_STATE_BACKEND = "flink.state.backend";
-
-  public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path";
-
-  public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD
-      "flink.state.backend.memory.size";
-
-  public static final String CHECKPOINTING = "checkpointing";
-
-  public static final String PARALLELISM = "parallelism";
-
-  private ConfigurationKeys() {}
-
-}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/HistoryServiceFlinkJob.java
index d80f64fafb69d3e0287347a8f90080584d4fcd82..9f90f45f3b9ee8353b2e4a320024c18828198059 100644
--- a/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/HistoryServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/HistoryServiceFlinkJob.java
@@ -9,12 +9,14 @@ import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
 import org.apache.flink.streaming.api.windowing.time.Time;
+import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 import org.apache.kafka.common.serialization.Serdes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.flink.AbstractFlinkService;
+import rocks.theodolite.benchmarks.commons.flink.ConfigurationKeys;
 import rocks.theodolite.benchmarks.commons.flink.KafkaConnectorFactory;
 import rocks.theodolite.benchmarks.commons.flink.serialization.StatsSerializer;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
@@ -49,12 +51,14 @@ public final class HistoryServiceFlinkJob extends AbstractFlinkService {
     final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
     final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL);
     final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
-    final ZoneId timeZone = ZoneId.of(this.config.getString(ConfigurationKeys.TIME_ZONE));
+    final String outputTopic = this.config.getString(Uc3ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final ZoneId timeZone = ZoneId.of(this.config.getString(Uc3ConfigurationKeys.TIME_ZONE));
     final Time aggregationDuration =
-        Time.days(this.config.getInt(ConfigurationKeys.AGGREGATION_DURATION_DAYS));
+        Time.days(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_DURATION_DAYS));
     final Time aggregationAdvance =
-        Time.days(this.config.getInt(ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
+        Time.days(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
+    final Time triggerDuration =
+        Time.seconds(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_TRIGGER_INTERVAL_SECONDS));
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
 
     final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory(
@@ -80,13 +84,14 @@ public final class HistoryServiceFlinkJob extends AbstractFlinkService {
           return keyFactory.createKey(record.getIdentifier(), dateTime);
         })
         .window(SlidingEventTimeWindows.of(aggregationDuration, aggregationAdvance))
+        .trigger(ContinuousProcessingTimeTrigger.of(triggerDuration))
         .aggregate(new StatsAggregateFunction(), new HourOfDayProcessWindowFunction())
         .map(tuple -> {
-          final String newKey = keyFactory.getSensorId(tuple.f0);
-          final String newValue = tuple.f1.toString();
-          final int hourOfDay = tuple.f0.getHourOfDay();
-          LOGGER.info("{}|{}: {}", newKey, hourOfDay, newValue);
-          return new Tuple2<>(newKey, newValue);
+          final String sensorId = keyFactory.getSensorId(tuple.f0);
+          final String stats = tuple.f1.toString();
+          // final int hourOfDay = tuple.f0.getHourOfDay();
+          // LOGGER.info("{}|{}: {}", newKey, hourOfDay, newValue);
+          return new Tuple2<>(sensorId, stats);
         })
         .name("map")
         .returns(Types.TUPLE(Types.STRING, Types.STRING))
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/Uc3ConfigurationKeys.java b/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/Uc3ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..e3e167cbc7ee8e043856ed3c5ce4f882e882c2c8
--- /dev/null
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/rocks/theodolite/benchmarks/uc3/flink/Uc3ConfigurationKeys.java
@@ -0,0 +1,21 @@
+package rocks.theodolite.benchmarks.uc3.flink;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc3ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
+
+  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
+
+  public static final String AGGREGATION_TRIGGER_INTERVAL_SECONDS = // NOPMD
+      "aggregation.trigger.interval.seconds";
+
+  public static final String TIME_ZONE = "time.zone";
+
+  private Uc3ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties
index 6b6874674ce6a0abea73ea6d983c00c15deb8bb1..1d91a3b492fb2a4aee23476f43f74796f0e5d8df 100644
--- a/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties
@@ -5,9 +5,11 @@ kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
 schema.registry.url=http://localhost:8081
+
+time.zone=Europe/Paris
 aggregation.duration.days=30
 aggregation.advance.days=1
-num.threads=1
-commit.interval.ms=100
-cache.max.bytes.buffering=-1
-time.zone=Europe/Paris
\ No newline at end of file
+aggregation.trigger.interval.seconds=15
+
+# Flink configuration
+checkpointing.interval.ms=1000
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
index ecf38bd6c6a85e6d0f1431708a69f3431aff4730..c9688c287f6454a9fca1bc9d222bb103aadadb75 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
@@ -1,72 +1,67 @@
 package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
+import com.google.common.math.StatsAccumulator;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.time.Duration;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.HazelcastJetService;
 
 /**
- * A microservice that manages the history and, therefore, stores and aggregates incoming
- * measurements.
+ * A microservice that aggregates incoming messages in a sliding window.
  */
-public class HistoryService {
+public class HistoryService extends HazelcastJetService {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
 
-  // Hazelcast settings (default)
-  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
-  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  /**
+   * Constructs the use case logic for UC3. Retrieves the needed values and instantiates a pipeline
+   * factory.
+   */
+  public HistoryService() {
+    super(LOGGER);
+    final Properties kafkaProps =
+        this.propsBuilder.buildReadProperties(
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
 
-  // Kafka settings (default)
-  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
-  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
-  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
-  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
-  
-  // UC3 specific (default)
-  private static final String WINDOW_SIZE_IN_SECONDS_DEFAULT = "2629800";
-  private static final String HOPSIZE_IN_SEC_DEFAULT = "86400";
+    final Properties kafkaWriteProps =
+        this.propsBuilder.buildWriteProperties(
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
 
-  // Job name (default)
-  private static final String JOB_NAME = "uc3-hazelcastjet";
+    final String kafkaOutputTopic =
+        this.config.getString(Uc3ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
 
-  /**
-   * Entrypoint for UC3 using Gradle Run.
-   */
-  public static void main(final String[] args) {
-    final HistoryService uc3HistoryService = new HistoryService();
-    try {
-      uc3HistoryService.run();
-    } catch (final Exception e) { // NOPMD
-      LOGGER.error("ABORT MISSION!: {}", e);
-    }
-  }
+    final Duration windowSize = Duration.ofDays(
+        this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_DURATION_DAYS));
 
-  /**
-   * Start a UC3 service.
-   *
-   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  public void run() throws Exception { // NOPMD
-    this.createHazelcastJetApplication();
+    final Duration hoppingSize = Duration.ofDays(
+        this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
+
+    final Duration emitPeriod = Duration.ofSeconds(
+        this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_EMIT_PERIOD_SECONDS));
+
+    this.pipelineFactory = new Uc3PipelineFactory(
+        kafkaProps,
+        this.kafkaInputTopic,
+        kafkaWriteProps,
+        kafkaOutputTopic,
+        windowSize,
+        hoppingSize,
+        emitPeriod);
   }
 
-  /**
-   * Creates a Hazelcast Jet Application for UC3 using the Uc3HazelcastJetFactory.
-   *
-   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  private void createHazelcastJetApplication() throws Exception { // NOPMD
-    new Uc3HazelcastJetFactory()
-        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
-        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
-        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
-        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
-        .setWindowSizeInSecondsFromEnv(WINDOW_SIZE_IN_SECONDS_DEFAULT)
-        .setHoppingSizeInSecondsFromEnv(HOPSIZE_IN_SEC_DEFAULT)
-        .buildUc3Pipeline()
-        .buildUc3JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
-        .runUc3Job(JOB_NAME);
+  @Override
+  protected void registerSerializer() {
+    this.jobConfig.registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class);
+    this.jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
   }
 
+  public static void main(final String[] args) {
+    new HistoryService().run();
+  }
 }
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKey.java
similarity index 93%
rename from theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
rename to theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKey.java
index c69f433f3af7ec0484c254af9e59e7d284379cb0..6fe3343ce0f6dc4d9828a0147dce9e328ad76b02 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKey.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
 import java.util.Objects;
 
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeyFactory.java
similarity index 53%
rename from theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
rename to theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeyFactory.java
index 4eddb85efebf5b8b07317d0cd39f36b90d3f4fcd..dda447d3e4c741d474e77299f38f8cc94f49209e 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeyFactory.java
@@ -1,12 +1,14 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
+import java.io.Serializable;
 import java.time.LocalDateTime;
 
 /**
- * A factory class to build an {@link HourOfDayKey}.
- *
+ * {@link StatsKeyFactory} for {@link HourOfDayKey}.
  */
-public class HoursOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey> {
+public class HourOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey>, Serializable {
+
+  private static final long serialVersionUID = 9047643205410220184L;
 
   @Override
   public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) {
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeySerializer.java
similarity index 92%
rename from theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
rename to theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeySerializer.java
index 91ba3f2be26f4317a1dec81caf9080da8c1edc9c..c689bab16b7392dc8d958bee61c4fdecb50ba0b7 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HourOfDayKeySerializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
 import com.hazelcast.nio.ObjectDataInput;
 import com.hazelcast.nio.ObjectDataOutput;
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/MapTimeKeyConfiguration.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/MapTimeKeyConfiguration.java
new file mode 100644
index 0000000000000000000000000000000000000000..9199dd7c1a0f398e7376da027bf2d7b3b77176e6
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/MapTimeKeyConfiguration.java
@@ -0,0 +1,33 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import java.time.ZoneId;
+
+/**
+ * Stores a configuration consisting of a {@link StatsKeyFactory} and a {@link ZoneId}.
+ */
+public class MapTimeKeyConfiguration {
+
+  private final StatsKeyFactory<HourOfDayKey> keyFactory;
+  private final ZoneId zone;
+
+  /**
+   * Create a {@link MapTimeKeyConfiguration} for the supplied {@link StatsKeyFactory} and
+   * {@link ZoneId}.
+   */
+  public MapTimeKeyConfiguration(
+      final StatsKeyFactory<HourOfDayKey> keyFactory,
+      final ZoneId zone) {
+    super();
+    this.keyFactory = keyFactory;
+    this.zone = zone;
+  }
+
+  public StatsKeyFactory<HourOfDayKey> getKeyFactory() {
+    return this.keyFactory;
+  }
+
+  public ZoneId getZone() {
+    return this.zone;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSerializer.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..c6957ac1e84fbd08a98e15ebad340f0ea63fd1de
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSerializer.java
@@ -0,0 +1,37 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A serializer and deserializer for the {@link StatsAccumulator}.
+ */
+public class StatsAccumulatorSerializer implements StreamSerializer<StatsAccumulator> {
+
+  private static final int TYPE_ID = 69_420;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final StatsAccumulator object) throws IOException {
+    final byte[] byteArray = object.snapshot().toByteArray();
+    out.writeByteArray(byteArray);
+  }
+
+  @Override
+  public StatsAccumulator read(final ObjectDataInput in) throws IOException {
+    final byte[] byteArray = in.readByteArray();
+    final Stats deserializedStats = Stats.fromByteArray(byteArray);
+    final StatsAccumulator accumulator = new StatsAccumulator();
+    accumulator.addAll(deserializedStats);
+    return accumulator;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSupplier.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSupplier.java
new file mode 100644
index 0000000000000000000000000000000000000000..8143fb6a0486f2bd417f9409f88f62aed726e6f7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAccumulatorSupplier.java
@@ -0,0 +1,21 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.function.SupplierEx;
+
+/**
+ * Supplies a {@link StatsAccumulator}.
+ */
+public class StatsAccumulatorSupplier implements SupplierEx<StatsAccumulator> {
+
+  private static final long serialVersionUID = -656395626316842910L; // NOPMD
+
+  /**
+   * Gets a {@link StatsAccumulator}.
+   */
+  @Override
+  public StatsAccumulator getEx() throws Exception {
+    return new StatsAccumulator();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAggregatorFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAggregatorFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..1588668a986d2ee1688a26edac874da9d4295b28
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsAggregatorFactory.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import java.util.Map.Entry;
+import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
+
+
+/**
+ * Factory for creating an aggregation operator for {@link Stats} objects.
+ */
+public final class StatsAggregatorFactory {
+
+  private StatsAggregatorFactory() {}
+
+  /**
+   * Defines an AggregateOperation1 for Hazelcast Jet which is used in the Pipeline of the Hazelcast
+   * Jet implementation of UC3.
+   *
+   * <p>
+   * Takes a windowed and keyed {@code Entry<String,ActivePowerRecord>} elements and returns a
+   * {@link Stats} object.
+   * </p>
+   *
+   * @return An AggregateOperation used by Hazelcast Jet in a streaming stage which aggregates
+   *         ActivePowerRecord Objects into Stats Objects.
+   */
+  public static AggregateOperation1<Entry<?, ActivePowerRecord>, StatsAccumulator, Stats> // NOCS
+      create() {
+    // Aggregate Operation to Create a Stats Object from Entry<?,ActivePowerRecord> items using
+    // the StatsAccumulator.
+    return AggregateOperation
+        // Creates the accumulator
+        .withCreate(new StatsAccumulatorSupplier())
+        // Defines the accumulation
+        .<Entry<?, ActivePowerRecord>>andAccumulate((accumulator, item) -> {
+          accumulator.add(item.getValue().getValueInW());
+        })
+        // Defines the combination of spread out instances
+        .andCombine((left, right) -> {
+          final Stats rightStats = right.snapshot();
+          left.addAll(rightStats);
+
+        })
+        // Finishes the aggregation
+        .andExportFinish(StatsAccumulator::snapshot);
+  }
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsKeyFactory.java
similarity index 82%
rename from theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
rename to theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsKeyFactory.java
index 2a404781e5916473604f14f87b9c3eccf9eda342..b731ee27509e6e303437fdff9f8c9327ef99dea3 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/StatsKeyFactory.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
 import java.time.LocalDateTime;
 
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3ConfigurationKeys.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..86978513fedbbe64dc5818e16e306f75d7aeb65c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3ConfigurationKeys.java
@@ -0,0 +1,17 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+/**
+ * Configuration Keys used for Hazelcast Jet Benchmark implementations.
+ */
+public class Uc3ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
+
+  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
+
+  public static final String AGGREGATION_EMIT_PERIOD_SECONDS = // NOPMD
+      "aggregation.emit.period.seconds";
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
deleted file mode 100644
index d87aa19d316b278160dbc92b19b9be3d40a41d61..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
+++ /dev/null
@@ -1,341 +0,0 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet;
-
-import com.hazelcast.jet.JetInstance;
-import com.hazelcast.jet.config.JobConfig;
-import com.hazelcast.jet.pipeline.Pipeline;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import java.util.Objects;
-import java.util.Properties;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.slf4j.Logger;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
-
-/**
- * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC3
- * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
- * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
- * the Read and Write Properties, set the input and output topic, and set the window size in seconds
- * and the hopping size in seconds. This can be done using internal functions of this factory.
- * Outside data only refers to custom values or default values in case data of the environment
- * cannot the fetched.
- */
-public class Uc3HazelcastJetFactory { // NOPMD
-
-  // Information per History Service
-  private Properties kafkaReadPropsForPipeline;
-  private Properties kafkaWritePropsForPipeline;
-  private String kafkaInputTopic;
-  private String kafkaOutputTopic;
-  private JetInstance uc3JetInstance;
-  private Pipeline uc3JetPipeline;
-  // UC3 specific
-  private int windowSizeInSeconds;
-  private int hoppingSizeInSeconds;
-
-  /////////////////////////////////////
-  // Layer 1 - Hazelcast Jet Run Job //
-  /////////////////////////////////////
-
-  /**
-   * Needs a JetInstance and Pipeline defined in this factors. Adds the pipeline to the existing
-   * JetInstance as a job.
-   *
-   * @param jobName The name of the job.
-   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be startet.
-   */
-  public void runUc3Job(final String jobName) throws IllegalStateException { // NOPMD
-
-    // Check if a Jet Instance for UC3 is set.
-    if (this.uc3JetInstance == null) {
-      throw new IllegalStateException("Jet Instance is not set! "
-          + "Cannot start a hazelcast jet job for UC3.");
-    }
-
-    // Check if a Pipeline for UC3 is set.
-    if (this.uc3JetPipeline == null) {
-      throw new IllegalStateException(
-          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC3.");
-    }
-
-    // Adds the job name and joins a job to the JetInstance defined in this factory
-    final JobConfig jobConfig = new JobConfig()
-        .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class)
-        .setName(jobName);
-    this.uc3JetInstance.newJobIfAbsent(this.uc3JetPipeline, jobConfig).join();
-  }
-
-  /////////////
-  // Layer 2 //
-  /////////////
-
-  /**
-   * Build a Hazelcast JetInstance used to run a job on.
-   *
-   * @param logger The logger specified for this JetInstance.
-   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
-   *        environment.
-   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
-   * @return A Uc3HazelcastJetFactory containing a set JetInstance.
-   */
-  public Uc3HazelcastJetFactory buildUc3JetInstanceFromEnv(final Logger logger,
-      final String bootstrapServerDefault,
-      final String hzKubernetesServiceDnsKey) {
-    this.uc3JetInstance = new JetInstanceBuilder()
-        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
-        .build();
-    return this;
-  }
-
-  /**
-   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
-   * topic and kafka properties defined in this factory beforehand.
-   *
-   * @return A Uc3HazelcastJetFactory containg a set pipeline.
-   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
-   *         cannot be built.
-   */
-  public Uc3HazelcastJetFactory buildUc3Pipeline() throws IllegalStateException { // NOPMD
-
-    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
-
-    // Check if Properties for the Kafka Input are set.
-    if (this.kafkaReadPropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if Properties for the Kafka Output are set.
-    if (this.kafkaWritePropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka input topic is set.
-    if (this.kafkaInputTopic == null) {
-      throw new IllegalStateException("Kafka input topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka output topic is set.
-    if (this.kafkaOutputTopic == null) {
-      throw new IllegalStateException("kafka output topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the window size for the "sliding" window is set.
-    if (this.windowSizeInSeconds <= 0) {
-      throw new IllegalStateException(
-          "window size in seconds for pipeline not set or not greater than 0! "
-              + defaultPipelineWarning);
-    }
-
-    // Check if the hopping distance for the "sliding" window is set.
-    if (this.hoppingSizeInSeconds <= 0) {
-      throw new IllegalStateException(
-          "hopping size in seconds for pipeline not set or not greater than 0! "
-              + defaultPipelineWarning);
-    }
-
-    // Build Pipeline Using the pipelineBuilder
-    final Uc3PipelineBuilder pipeBuilder = new Uc3PipelineBuilder();
-    this.uc3JetPipeline =
-        pipeBuilder.build(this.kafkaReadPropsForPipeline,
-            this.kafkaWritePropsForPipeline,
-            this.kafkaInputTopic, this.kafkaOutputTopic, this.hoppingSizeInSeconds,
-            this.windowSizeInSeconds);
-    // Return Uc3HazelcastJetBuilder factory
-    return this;
-  }
-
-  /////////////
-  // Layer 3 //
-  /////////////
-
-  /**
-   * Sets kafka read properties for pipeline used in this builder.
-   *
-   * @param kafkaReadProperties A propeties object containing necessary values used for the hazelcst
-   *        jet kafka connection to read data.
-   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
-   */
-  public Uc3HazelcastJetFactory setCustomReadProperties(// NOPMD
-      final Properties kafkaReadProperties) {
-    this.kafkaReadPropsForPipeline = kafkaReadProperties;
-    return this;
-  }
-
-  /**
-   * Sets kafka write properties for pipeline used in this builder.
-   *
-   * @param kafkaWriteProperties A propeties object containing necessary values used for the
-   *        hazelcst jet kafka connection to write data.
-   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
-   */
-  public Uc3HazelcastJetFactory setCustomWriteProperties(// NOPMD
-      final Properties kafkaWriteProperties) {
-    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
-    return this;
-  }
-
-  /**
-   * Sets kafka read properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
-   *        url can be fetched from the environment.
-   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
-   */
-  public Uc3HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault,
-      final String schemaRegistryUrlDefault,
-      final String jobName) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaReadProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            jobName,
-            StringDeserializer.class.getCanonicalName(),
-            KafkaAvroDeserializer.class.getCanonicalName());
-    this.kafkaReadPropsForPipeline = kafkaReadProps;
-    return this;
-  }
-
-  /**
-   * Sets kafka write properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
-   */
-  public Uc3HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaWriteProps =
-        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            StringSerializer.class.getCanonicalName(),
-            StringSerializer.class.getCanonicalName());
-    this.kafkaWritePropsForPipeline = kafkaWriteProps;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder.
-   *
-   * @param inputTopic The kafka topic used as the pipeline input.
-   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc3HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
-      final String inputTopic) {
-    this.kafkaInputTopic = inputTopic;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input output for the pipeline used in this builder.
-   *
-   * @param outputTopic The kafka topic used as the pipeline output.
-   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc3HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
-    this.kafkaOutputTopic = outputTopic;
-    return this;
-  }
-
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc3HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
-      final String defaultInputTopic) {
-    this.kafkaInputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
-        defaultInputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc3HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
-      final String defaultOutputTopic) {
-    this.kafkaOutputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
-        defaultOutputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the window size in seconds for the pipeline used in this builder.
-   *
-   * @param windowSizeInSeconds the windowSizeInSeconds to be used for this pipeline.
-   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
-   */
-  public Uc3HazelcastJetFactory setCustomWindowSizeInSeconds(// NOPMD
-      final int windowSizeInSeconds) {
-    this.windowSizeInSeconds = windowSizeInSeconds;
-    return this;
-  }
-
-  /**
-   * Sets the window size in seconds for the pipeline used in this builder from the environment.
-   *
-   * @param defaultWindowSizeInSeconds the default window size in seconds to be used for this
-   *        pipeline when none is set in the environment.
-   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
-   */
-  public Uc3HazelcastJetFactory setWindowSizeInSecondsFromEnv(// NOPMD
-      final String defaultWindowSizeInSeconds) {
-    final String windowSizeInSeconds = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.WINDOW_SIZE_IN_SECONDS),
-        defaultWindowSizeInSeconds);
-    final int windowSizeInSecondsNumber = Integer.parseInt(windowSizeInSeconds);
-    this.windowSizeInSeconds = windowSizeInSecondsNumber;
-    return this;
-  }
-
-  /**
-   * Sets the hopping size in seconds for the pipeline used in this builder.
-   *
-   * @param hoppingSizeInSeconds the hoppingSizeInSeconds to be used for this pipeline.
-   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
-   */
-  public Uc3HazelcastJetFactory setCustomHoppingSizeInSeconds(// NOPMD
-      final int hoppingSizeInSeconds) {
-    this.hoppingSizeInSeconds = hoppingSizeInSeconds;
-    return this;
-  }
-
-  /**
-   * Sets the hopping size in seconds for the pipeline used in this builder from the environment.
-   *
-   * @param defaultHoppingSizeInSeconds the default hopping size in seconds to be used for this
-   *        pipeline when none is set in the environment.
-   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
-   */
-  public Uc3HazelcastJetFactory setHoppingSizeInSecondsFromEnv(// NOPMD
-      final String defaultHoppingSizeInSeconds) {
-    final String hoppingSizeInSeconds = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.HOPPING_SIZE_IN_SECONDS),
-        defaultHoppingSizeInSeconds);
-    final int hoppingSizeInSecondsNumber = Integer.parseInt(hoppingSizeInSeconds);
-    this.hoppingSizeInSeconds = hoppingSizeInSecondsNumber;
-    return this;
-  }
-}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
deleted file mode 100644
index e651d44fd2d099ae41296cc08487c787501d9b46..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package rocks.theodolite.benchmarks.uc3.hazelcastjet;
-
-import com.hazelcast.jet.aggregate.AggregateOperations;
-import com.hazelcast.jet.kafka.KafkaSinks;
-import com.hazelcast.jet.kafka.KafkaSources;
-import com.hazelcast.jet.pipeline.Pipeline;
-import com.hazelcast.jet.pipeline.Sinks;
-import com.hazelcast.jet.pipeline.StreamSource;
-import com.hazelcast.jet.pipeline.StreamStage;
-import com.hazelcast.jet.pipeline.WindowDefinition;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.TimeZone;
-import java.util.concurrent.TimeUnit;
-import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HoursOfDayKeyFactory;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.StatsKeyFactory;
-
-/**
- * Builder to build a HazelcastJet Pipeline for UC3 which can be used for stream processing using
- * Hazelcast Jet.
- */
-public class Uc3PipelineBuilder {
-
-  /**
-   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
-   *
-   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
-   *        attributes.
-   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
-   *        attributes.
-   * @param kafkaInputTopic The name of the input topic used for the pipeline.
-   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
-   * @param hoppingSizeInSeconds The hop length of the sliding window used in the aggregation of
-   *        this pipeline.
-   * @param windowSizeInSeconds The window length of the sliding window used in the aggregation of
-   *        this pipeline.
-   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
-   *         for UC3.
-   */
-  public Pipeline build(final Properties kafkaReadPropsForPipeline,
-      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
-      final String kafkaOutputTopic,
-      final int hoppingSizeInSeconds, final int windowSizeInSeconds) {
-
-    // Define a new Pipeline
-    final Pipeline pipe = Pipeline.create();
-
-    // Define the source
-    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource = KafkaSources
-        .<String, ActivePowerRecord>kafka(
-            kafkaReadPropsForPipeline, kafkaInputTopic);
-
-    // Extend topology for UC3
-    final StreamStage<Map.Entry<String, String>> uc3Product =
-        this.extendUc3Topology(pipe, kafkaSource, hoppingSizeInSeconds, windowSizeInSeconds);
-
-    // Add Sink1: Logger
-    uc3Product.writeTo(Sinks.logger());
-    // Add Sink2: Write back to kafka for the final benchmark
-    uc3Product.writeTo(KafkaSinks.<String, String>kafka(
-        kafkaWritePropsForPipeline, kafkaOutputTopic));
-
-    return pipe;
-  }
-
-  /**
-   * Extends to a blank Hazelcast Jet Pipeline the UC3 topology defined by theodolite.
-   *
-   * <p>
-   * UC3 takes {@code ActivePowerRecord} object, groups them by keys and calculates average double
-   * values for a sliding window and sorts them into the hour of the day.
-   * </p>
-   *
-   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
-   * @param source A streaming source to fetch data from.
-   * @param hoppingSizeInSeconds The jump distance of the "sliding" window.
-   * @param windowSizeInSeconds The size of the "sliding" window.
-   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
-   *         and value of the Entry object. It can be used to be further modified or directly be
-   *         written into a sink.
-   */
-  public StreamStage<Map.Entry<String, String>> extendUc3Topology(final Pipeline pipe,
-      final StreamSource<Entry<String, ActivePowerRecord>> source, final int hoppingSizeInSeconds,
-      final int windowSizeInSeconds) {
-    // Build the pipeline topology.
-    return pipe
-        .readFrom(source)
-        // use Timestamps
-        .withNativeTimestamps(0)
-        .setLocalParallelism(1)
-        // Map timestamp to hour of day and create new key using sensorID and
-        // datetime mapped to HourOfDay
-        .map(record -> {
-          final String sensorId = record.getValue().getIdentifier();
-          final long timestamp = record.getValue().getTimestamp();
-          final LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
-              TimeZone.getDefault().toZoneId());
-
-          final StatsKeyFactory<HourOfDayKey> keyFactory = new HoursOfDayKeyFactory();
-          final HourOfDayKey newKey = keyFactory.createKey(sensorId, dateTime);
-
-          return Map.entry(newKey, record.getValue());
-        })
-        // group by new keys
-        .groupingKey(Entry::getKey)
-        // Sliding/Hopping Window
-        .window(WindowDefinition.sliding(TimeUnit.SECONDS.toMillis(windowSizeInSeconds),
-            TimeUnit.SECONDS.toMillis(hoppingSizeInSeconds)))
-        // get average value of group (sensoreId,hourOfDay)
-        .aggregate(
-            AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
-        // map to return pair (sensorID,hourOfDay) -> (averaged what value)
-        .map(agg -> {
-          final String theValue = agg.getValue().toString();
-          final String theKey = agg.getKey().toString();
-          return Map.entry(theKey, theValue);
-        });
-  }
-
-}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..1bf2bb4ce72b7dece96b036daaaaf475180c72f8
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineFactory.java
@@ -0,0 +1,132 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Duration;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.util.Map;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.PipelineFactory;
+import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
+
+
+/**
+ * PipelineFactory for use case 3. Allows to build and extend pipelines.
+ */
+public class Uc3PipelineFactory extends PipelineFactory {
+
+  private final ZoneId zone = ZoneId.of("Europe/Paris"); // TODO as parameter
+
+  private final Duration hoppingSize;
+  private final Duration windowSize;
+  private final Duration emitPeriod;
+
+  /**
+   * Build a new Pipeline.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param hoppingSize The hop length of the sliding window used in the aggregation of this
+   *        pipeline.
+   * @param windowSize The window length of the sliding window used in the aggregation of this
+   *        pipeline.
+   */
+  public Uc3PipelineFactory(final Properties kafkaReadPropsForPipeline,
+      final String kafkaInputTopic,
+      final Properties kafkaWritePropsForPipeline,
+      final String kafkaOutputTopic,
+      final Duration windowSize,
+      final Duration hoppingSize,
+      final Duration emitPeriod) {
+    super(
+        kafkaReadPropsForPipeline,
+        kafkaInputTopic,
+        kafkaWritePropsForPipeline,
+        kafkaOutputTopic);
+    this.windowSize = windowSize;
+    this.hoppingSize = hoppingSize;
+    this.emitPeriod = emitPeriod;
+  }
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @return a pipeline used which can be used in a Hazelcast Jet Instance to process data for UC3.
+   */
+  @Override
+  public Pipeline buildPipeline() {
+
+    // Define the source
+    final StreamSource<Map.Entry<String, ActivePowerRecord>> kafkaSource = KafkaSources
+        .<String, ActivePowerRecord>kafka(
+            this.kafkaReadPropsForPipeline, this.kafkaInputTopic);
+
+    // Extend topology for UC3
+    final StreamStage<Map.Entry<String, String>> uc3Product =
+        this.extendUc3Topology(kafkaSource);
+
+    // Sink 1: Logger (disabled to reduce overhead)
+    // uc3Product.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc3Product.writeTo(KafkaSinks.<String, String>kafka(
+        this.kafkaWritePropsForPipeline, this.kafkaOutputTopic));
+
+    return this.pipe;
+  }
+
+  /**
+   * Extends to a blank Hazelcast Jet Pipeline the UC3 topology defined by theodolite.
+   *
+   * <p>
+   * UC3 takes {@code ActivePowerRecord} object, groups them by keys and calculates average double
+   * values for a sliding window and sorts them into the hour of the day.
+   * </p>
+   *
+   * @param source A streaming source to fetch data from.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc3Topology(
+      final StreamSource<Map.Entry<String, ActivePowerRecord>> source) {
+
+    final StatsKeyFactory<HourOfDayKey> keyFactory = new HourOfDayKeyFactory();
+    final ZoneId localZone = this.zone; // Make serializable in lambdas
+
+    // Build the pipeline topology.
+    return this.pipe
+        .readFrom(source)
+        // use Timestamps
+        .withNativeTimestamps(0)
+        // .setLocalParallelism(1)
+        // Group by HourOfDayKey
+        .groupingKey(record -> {
+          final String sensorId = record.getValue().getIdentifier();
+          final Instant instant = Instant.ofEpochMilli(record.getValue().getTimestamp());
+          final LocalDateTime dateTime = LocalDateTime.ofInstant(instant, localZone);
+          return keyFactory.createKey(sensorId, dateTime);
+        })
+        // Sliding/Hopping Window
+        .window(WindowDefinition
+            .sliding(this.windowSize.toMillis(), this.hoppingSize.toMillis())
+            .setEarlyResultsPeriod(this.emitPeriod.toMillis()))
+        // get aggregated values for (sensorId, hourOfDay)
+        .aggregate(StatsAggregatorFactory.create())
+        // map to return pair sensorID -> stats
+        .map(agg -> {
+          final String sensorId = agg.getKey().getSensorId();
+          final String stats = agg.getValue().toString();
+          return Map.entry(sensorId, stats);
+        });
+  }
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
index e3371cc87e20e85e6e8c327955537e6e49dab86e..7b28bf5c4047b1f644703268969fe9c680b50238 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -1,8 +1,11 @@
-application.name=theodolite-uc1-application
+application.name=theodolite-uc3-application
 application.version=0.0.1
 
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
-
+kafka.output.topic=output
 schema.registry.url=http://localhost:8081
 
+aggregation.duration.days=30
+aggregation.advance.days=1
+aggregation.emit.period.seconds=15
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
index 969ff27d1154a0bd52cb3a048eca2f12ca901138..8f17e981eee31ca46edeeccdf7b7b8822816280d 100644
--- a/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
@@ -1,5 +1,6 @@
 package rocks.theodolite.benchmarks.uc3.hazelcastjet;
 
+import com.google.common.math.Stats;
 import com.hazelcast.jet.Jet;
 import com.hazelcast.jet.JetInstance;
 import com.hazelcast.jet.config.JetConfig;
@@ -12,20 +13,19 @@ import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
 import com.hazelcast.jet.pipeline.test.Assertions;
 import com.hazelcast.jet.pipeline.test.TestSources;
 import com.hazelcast.jet.test.SerialTest;
-import java.time.Instant;
-import java.time.LocalDateTime;
+import java.time.Duration;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.TimeZone;
+import java.util.Properties;
 import java.util.concurrent.CompletionException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
-import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
 
 /**
  * Test methods for the Hazelcast Jet Implementation of UC3.
@@ -33,6 +33,20 @@ import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySer
 @Category(SerialTest.class)
 public class Uc3PipelineTest extends JetTestSupport {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(Uc3PipelineTest.class);
+
+  // Setup Configuration
+  private static final int TEST_ITEMS_PER_SECOND = 1;
+  private static final String TEST_SENSOR_NAME = "TEST-SENSOR";
+  private static final Double TEST_VALUE_IN_W = 10.0;
+  private static final Duration TEST_WINDOW_SLIDE = Duration.ofSeconds(1);
+  private static final Duration TEST_WINDOW_SIZE = Duration.ofSeconds(50);
+  private static final Duration TEST_EMIT_PERIOD = Duration.ofSeconds(0); // Do not emit early
+                                                                          // results
+  // Used to check hourOfDay
+  private static final long MOCK_TIMESTAMP = 1632741651;
+
+
   // Test Machinery
   private JetInstance testInstance = null;
   private Pipeline testPipeline = null;
@@ -46,16 +60,6 @@ public class Uc3PipelineTest extends JetTestSupport {
   @Before
   public void buildUc3Pipeline() {
 
-    // Setup Configuration
-    final int testItemsPerSecond = 1;
-    final String testSensorName = "TEST-SENSOR";
-    final Double testValueInW = 10.0;
-    final int testHopSizeInSec = 1;
-    final int testWindowSizeInSec = 50;
-    // Used to check hourOfDay
-    final long mockTimestamp = 1632741651;
-
-
     // Create mock jet instance with configuration
     final String testClusterName = randomName();
     final JetConfig testJetConfig = new JetConfig();
@@ -64,19 +68,24 @@ public class Uc3PipelineTest extends JetTestSupport {
 
     // Create a test source
     final StreamSource<Entry<String, ActivePowerRecord>> testSource =
-        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+        TestSources.itemStream(TEST_ITEMS_PER_SECOND, (timestamp, item) -> {
           final ActivePowerRecord testRecord =
-              new ActivePowerRecord(testSensorName, mockTimestamp, testValueInW);
+              new ActivePowerRecord(TEST_SENSOR_NAME, MOCK_TIMESTAMP, TEST_VALUE_IN_W);
           final Entry<String, ActivePowerRecord> testEntry =
-              Map.entry(testSensorName, testRecord);
+              Map.entry(TEST_SENSOR_NAME, testRecord);
           return testEntry;
         });
 
     // Create pipeline to test
-    final Uc3PipelineBuilder pipelineBuilder = new Uc3PipelineBuilder();
-    this.testPipeline = Pipeline.create();
-    this.uc3Topology = pipelineBuilder.extendUc3Topology(this.testPipeline, testSource,
-        testHopSizeInSec, testWindowSizeInSec);
+    final Properties properties = new Properties();
+    final Uc3PipelineFactory factory = new Uc3PipelineFactory(
+        properties, "", properties, "", TEST_WINDOW_SIZE,
+        TEST_WINDOW_SLIDE,
+        TEST_EMIT_PERIOD);
+
+    this.uc3Topology = factory.extendUc3Topology(testSource);
+
+    this.testPipeline = factory.getPipe();
   }
 
   /**
@@ -87,49 +96,40 @@ public class Uc3PipelineTest extends JetTestSupport {
 
     // Assertion Configuration
     final int timeout = 10;
-    final String testSensorName = "TEST-SENSOR";
-    final Double testValueInW = 10.0;
-    // Used to check hourOfDay
-    final long mockTimestamp = 1632741651;
+    // final String testSensorName = "TEST-SENSOR";
+    // final double testValueInW = 10.0;
 
     // Assertion
     this.uc3Topology.apply(Assertions.assertCollectedEventually(timeout,
         collection -> {
 
           // DEBUG
-          System.out.println("DEBUG: CHECK 1 || Entered Assertion of testOutput()");
+          LOGGER.info("CHECK 1 || Entered Assertion of testOutput()");
 
           // Check all collected Items
           boolean allOkay = true;
           if (collection != null) {
-            System.out.println("DEBUG: CHECK 2 || Collection Size: " + collection.size());
-            for (final Entry<String, String> currentEntry : collection) {
-
-              // Build hour of day
-              final long timestamp = mockTimestamp;
-              final int expectedHour = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
-                  TimeZone.getDefault().toZoneId()).getHour();
-
-              final String expectedKey = testSensorName + ";" + expectedHour;
-              final String expectedValue = testValueInW.toString();
+            LOGGER.info("CHECK 2 || Collection Size: " + collection.size());
+            for (final Entry<String, String> entry : collection) {
+              // Compare expected output with generated output
+              final String expectedKey = TEST_SENSOR_NAME;
+              final String expectedValue = Stats.of(TEST_VALUE_IN_W).toString();
 
               // DEBUG
-              System.out.println(
-                  "DEBUG: CHECK 3 || Expected Output: '" + expectedKey + "=" + expectedValue
-                      + "' - Actual Output: '" + currentEntry.getKey() + "="
-                      + currentEntry.getValue().toString() + "'");
-
-              if (!(currentEntry.getKey().equals(expectedKey)
-                  && currentEntry.getValue().toString().equals(expectedValue))) {
-                System.out.println("DEBUG: CHECK 5 || Failed assertion!");
+              LOGGER.info(
+                  "CHECK 3 || Expected Output: '" + expectedKey + "=" + expectedValue
+                      + "' - Actual Output: '" + entry.getKey() + "="
+                      + entry.getValue() + "'");
+
+              if (!(entry.getKey().equals(expectedKey) && entry.getValue().equals(expectedValue))) {
+                LOGGER.info("CHECK 5 || Failed assertion!");
                 allOkay = false;
               }
             }
           }
 
           // Assertion
-          Assert.assertTrue(
-              "Items do not match expected structure!", allOkay);
+          Assert.assertTrue("Items do not match expected structure!", allOkay);
         }));
 
     // Run the test!
diff --git a/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/HistoryService.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/HistoryService.java
index 2151d0be487d89f23b3bb170a95ff60270948841..637c2e60b81592cf7181aafb7e7ea5840a753128 100644
--- a/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/HistoryService.java
+++ b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/HistoryService.java
@@ -5,7 +5,6 @@ import java.util.concurrent.CompletableFuture;
 import org.apache.commons.configuration2.Configuration;
 import org.apache.kafka.streams.KafkaStreams;
 import rocks.theodolite.benchmarks.commons.commons.configuration.ServiceConfigurations;
-import rocks.theodolite.benchmarks.commons.kstreams.ConfigurationKeys;
 
 /**
  * A microservice that manages the history and, therefore, stores and aggregates incoming
@@ -33,11 +32,11 @@ public class HistoryService {
     // Use case specific stream configuration
     final Uc3KafkaStreamsBuilder uc3KafkaStreamsBuilder = new Uc3KafkaStreamsBuilder(this.config);
     uc3KafkaStreamsBuilder
-        .outputTopic(this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
+        .outputTopic(this.config.getString(Uc3ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
         .aggregationDuration(
-            Duration.ofDays(this.config.getInt(ConfigurationKeys.AGGREGATION_DURATION_DAYS)))
+            Duration.ofDays(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_DURATION_DAYS)))
         .aggregationAdvance(
-            Duration.ofDays(this.config.getInt(ConfigurationKeys.AGGREGATION_ADVANCE_DAYS)));
+            Duration.ofDays(this.config.getInt(Uc3ConfigurationKeys.AGGREGATION_ADVANCE_DAYS)));
 
     // Configuration of the stream application
     final KafkaStreams kafkaStreams = uc3KafkaStreamsBuilder.build();
diff --git a/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/TopologyBuilder.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/TopologyBuilder.java
index d6e000d815b0871e065af4a71d89d0e19949e73c..5c946cc287e97503c426b932e01b447ce82ac854 100644
--- a/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/TopologyBuilder.java
+++ b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/TopologyBuilder.java
@@ -81,9 +81,7 @@ public class TopologyBuilder {
         .map((key, stats) -> KeyValue.pair(
             keyFactory.getSensorId(key.key()),
             stats.toString()))
-        // TODO
-        // statsRecordFactory.create(key, value)))
-        // .peek((k, v) -> LOGGER.info("{}: {}", k, v)) // TODO Temp logging
+        // .peek((k, v) -> LOGGER.info("{}: {}", k, v))
         .to(
             this.outputTopic,
             Produced.with(
diff --git a/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/Uc3ConfigurationKeys.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/Uc3ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..db0bdaa3e7171302aeaa117b1446937ce467d131
--- /dev/null
+++ b/theodolite-benchmarks/uc3-kstreams/src/main/java/rocks/theodolite/benchmarks/uc3/kstreams/Uc3ConfigurationKeys.java
@@ -0,0 +1,16 @@
+package rocks.theodolite.benchmarks.uc3.kstreams;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc3ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
+
+  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
+
+  private Uc3ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties
index 0ce745fb61f87016aee5cc242c03069924ceb58e..8189f7568a64cf645375bc6fc2c1323b66d85d75 100644
--- a/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties
@@ -8,3 +8,6 @@ aggregation.duration.days=30
 aggregation.advance.days=1
 
 schema.registry.url=http://localhost:8081
+
+# Kafka Streams Config
+commit.interval.ms=5000
diff --git a/theodolite-benchmarks/uc4-beam-samza/Dockerfile b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
index b59585f748b95cf62e59be01c8fa9dc0d919f43a..92a400d15d7726dc948cfd7df7e6e4ee7fd76812 100644
--- a/theodolite-benchmarks/uc4-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
@@ -1,8 +1,9 @@
 FROM openjdk:11-slim
 
 ENV MAX_SOURCE_PARALLELISM=1024
+ENV ENABLE_METRICS=true
 
 ADD build/distributions/uc4-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc4-beam-samza/bin/uc4-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc4-beam-samza/bin/uc4-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=$ENABLE_METRICS --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
index 955f7101515c9467edc2e4900aa5464437f0e904..200e0da601faeadd790cf1fd0deeba38e6b1a7fa 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
@@ -67,17 +67,18 @@ public class PipelineFactory extends AbstractPipelineFactory {
   @Override
   protected void constructPipeline(final Pipeline pipeline) { // NOPMD
     // Additional needed variables
-    final String feedbackTopic = this.config.getString(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC);
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final String feedbackTopic = this.config.getString(Uc4ConfigurationKeys.KAFKA_FEEDBACK_TOPIC);
+    final String outputTopic = this.config.getString(Uc4ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
     final String configurationTopic =
-        this.config.getString(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC);
+        this.config.getString(Uc4ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC);
 
-    final Duration duration = Duration.standardSeconds(
-        this.config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES));
+    final Duration duration = Duration.millis(
+        this.config.getInt(Uc4ConfigurationKeys.EMIT_PERIOD_MS));
+    // final boolean enableTrigger = this.config.getBoolean(Uc4ConfigurationKeys.TRIGGER_ENABLE);
     final Duration triggerDelay = Duration.standardSeconds(
-        this.config.getInt(ConfigurationKeys.TRIGGER_INTERVAL));
-    final Duration gracePeriod = Duration.standardSeconds(
-        this.config.getInt(ConfigurationKeys.GRACE_PERIOD_MS));
+        this.config.getInt(Uc4ConfigurationKeys.TRIGGER_INTERVAL_SECONDS));
+    final Duration gracePeriod = Duration.millis(
+        this.config.getInt(Uc4ConfigurationKeys.GRACE_PERIOD_MS));
 
     // Read from Kafka
     final String bootstrapServer = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
@@ -115,9 +116,11 @@ public class PipelineFactory extends AbstractPipelineFactory {
         .apply("Read Windows", Window.into(FixedWindows.of(duration)))
         .apply("Set trigger for input", Window
             .<KV<String, ActivePowerRecord>>configure()
-            .triggering(Repeatedly.forever(
-                AfterProcessingTime.pastFirstElementInPane()
-                    .plusDelayOf(triggerDelay)))
+            .triggering(Repeatedly
+                .forever(
+                    AfterProcessingTime
+                        .pastFirstElementInPane()
+                        .plusDelayOf(triggerDelay)))
             .withAllowedLateness(gracePeriod)
             .discardingFiredPanes());
 
@@ -204,7 +207,8 @@ public class PipelineFactory extends AbstractPipelineFactory {
         .apply("Reset trigger for aggregations", Window
             .<KV<String, ActivePowerRecord>>configure()
             .triggering(Repeatedly.forever(
-                AfterProcessingTime.pastFirstElementInPane()
+                AfterProcessingTime
+                    .pastFirstElementInPane()
                     .plusDelayOf(triggerDelay)))
             .withAllowedLateness(gracePeriod)
             .discardingFiredPanes())
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/Uc4ConfigurationKeys.java b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/Uc4ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..0f314f1497708f73c7bc00337438f7a53d081731
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/Uc4ConfigurationKeys.java
@@ -0,0 +1,24 @@
+package rocks.theodolite.benchmarks.uc4.beam;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc4ConfigurationKeys {
+
+  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
+
+  public static final String EMIT_PERIOD_MS = "emit.period.ms";
+
+  public static final String GRACE_PERIOD_MS = "grace.period.ms";
+
+  // public static final String TRIGGER_ENABLE = "trigger.enable";
+
+  public static final String TRIGGER_INTERVAL_SECONDS = "trigger.interval.seconds";
+
+  private Uc4ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-beam/src/main/resources/META-INF/application.properties
index c1a8ca17b41ab8c8f0fa939c748200db5ba7d0d2..654d7d94b70ff03cab6152fef67c55a073a58704 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc4-beam/src/main/resources/META-INF/application.properties
@@ -6,21 +6,17 @@ kafka.input.topic=input
 kafka.output.topic=output
 kafka.configuration.topic=configuration
 kafka.feedback.topic=aggregation-feedback
-kafka.window.duration.minutes=1
 
 schema.registry.url=http://localhost:8081
 
-aggregation.duration.days=30
-aggregation.advance.days=1
-
-trigger.interval=15
-grace.period.ms=270
-
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
+emit.period.ms=5000
+#trigger.enable=true
+trigger.interval.seconds=15
+grace.period.ms=270000
 
 specific.avro.reader=true
+
+# Kafka Settings
 enable.auto.commit=true
 max.poll.records=500
-auto.offset.reset=earliest
\ No newline at end of file
+auto.offset.reset=earliest
diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/AggregationServiceFlinkJob.java b/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/AggregationServiceFlinkJob.java
index 5f4515cb851439841d1de3193f21275545033481..abdb9aaed5d5209c2932d15039d9fecb687327b5 100644
--- a/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/AggregationServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/AggregationServiceFlinkJob.java
@@ -19,6 +19,7 @@ import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.configuration.events.Event;
 import rocks.theodolite.benchmarks.commons.configuration.events.EventSerde;
 import rocks.theodolite.benchmarks.commons.flink.AbstractFlinkService;
+import rocks.theodolite.benchmarks.commons.flink.ConfigurationKeys;
 import rocks.theodolite.benchmarks.commons.flink.KafkaConnectorFactory;
 import rocks.theodolite.benchmarks.commons.flink.TupleType;
 import rocks.theodolite.benchmarks.commons.kafka.avro.SchemaRegistryAvroSerdeFactory;
@@ -62,16 +63,17 @@ public final class AggregationServiceFlinkJob extends AbstractFlinkService {
   @Override
   protected void buildPipeline() {
     // Get configurations
-    final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
-    final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL);
+    final String kafkaBroker = this.config.getString(Uc4ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+    final String schemaRegistryUrl =
+        this.config.getString(Uc4ConfigurationKeys.SCHEMA_REGISTRY_URL);
     final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
-    final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final String outputTopic = this.config.getString(Uc4ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
     final Time windowSize =
-        Time.milliseconds(this.config.getLong(ConfigurationKeys.WINDOW_SIZE_MS));
+        Time.milliseconds(this.config.getLong(Uc4ConfigurationKeys.EMIT_PERIOD_MS));
     final Duration windowGrace =
-        Duration.ofMillis(this.config.getLong(ConfigurationKeys.WINDOW_GRACE_MS));
+        Duration.ofMillis(this.config.getLong(Uc4ConfigurationKeys.GRACE_PERIOD_MS));
     final String configurationTopic =
-        this.config.getString(ConfigurationKeys.CONFIGURATION_KAFKA_TOPIC);
+        this.config.getString(Uc4ConfigurationKeys.CONFIGURATION_KAFKA_TOPIC);
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
 
     final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory(
diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/ConfigurationKeys.java b/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/ConfigurationKeys.java
deleted file mode 100644
index 290f4341958dd133926234219017cbb9149dd3f8..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/ConfigurationKeys.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package rocks.theodolite.benchmarks.uc4.flink;
-
-/**
- * Keys to access configuration parameters.
- */
-public final class ConfigurationKeys {
-  public static final String APPLICATION_NAME = "application.name";
-
-  public static final String APPLICATION_VERSION = "application.version";
-
-  public static final String CONFIGURATION_KAFKA_TOPIC = "configuration.kafka.topic";
-
-  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
-
-  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
-
-  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
-
-  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
-
-  public static final String WINDOW_SIZE_MS = "window.size.ms";
-
-  public static final String WINDOW_GRACE_MS = "window.grace.ms";
-
-  public static final String COMMIT_INTERVAL_MS = "commit.interval.ms";
-
-  public static final String FLINK_STATE_BACKEND = "flink.state.backend";
-
-  public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path";
-
-  public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD
-      "flink.state.backend.memory.size";
-
-  public static final String DEBUG = "debug";
-
-  public static final String CHECKPOINTING = "checkpointing";
-
-  public static final String PARALLELISM = "parallelism";
-
-  private ConfigurationKeys() {}
-
-}
diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/Uc4ConfigurationKeys.java b/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/Uc4ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..6fd2b0fa0ec50febd213fb3f7d24463d2bd6f51c
--- /dev/null
+++ b/theodolite-benchmarks/uc4-flink/src/main/java/rocks/theodolite/benchmarks/uc4/flink/Uc4ConfigurationKeys.java
@@ -0,0 +1,22 @@
+package rocks.theodolite.benchmarks.uc4.flink;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc4ConfigurationKeys {
+
+  public static final String CONFIGURATION_KAFKA_TOPIC = "configuration.kafka.topic";
+
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
+
+  public static final String EMIT_PERIOD_MS = "emit.period.ms";
+
+  public static final String GRACE_PERIOD_MS = "grace.period.ms";
+
+  private Uc4ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties
index de85fdb88c0462edc9fba58409918470fcb8cb6c..9250ec02fc36ad04d080204fb254e59ae3231cec 100644
--- a/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties
@@ -1,4 +1,4 @@
-application.name=theodolite-uc2-application
+application.name=theodolite-uc4-application
 application.version=0.0.1
 
 configuration.host=localhost
@@ -9,8 +9,9 @@ kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
 kafka.output.topic=output
 schema.registry.url=http://localhost:8081
-window.size.ms=1000
-window.grace.ms=0
-num.threads=1
-commit.interval.ms=1000
-cache.max.bytes.buffering=-1
+
+emit.period.ms=5000
+grace.period.ms=0
+
+# Flink configuration
+checkpointing.interval.ms=1000
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/AggregatedActivePowerRecordAccumulator.java
similarity index 97%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/AggregatedActivePowerRecordAccumulator.java
index 14934fbe3ceec6e01836958c1f7686e225ea40fd..567986ce2fe35df5a4571d15ecd0c51e24dfb9f5 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/AggregatedActivePowerRecordAccumulator.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 
@@ -20,7 +20,6 @@ public class AggregatedActivePowerRecordAccumulator {
     // This constructor is intentionally empty. Nothing special is needed here.
   }
 
-
   /**
    * Creates an AggregationObject.
    */
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ChildParentsTransformer.java
similarity index 98%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ChildParentsTransformer.java
index 3ba604270e37d746c2e98bc4eef5c80d2526b446..b9b599ed89b243d9a1f6c84e765751e4ebe21ecd 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ChildParentsTransformer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import java.util.Map;
 import java.util.Optional;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/EventDeserializer.java
similarity index 92%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/EventDeserializer.java
index ebdc9de86e82a9c8c16a71830190d26e6f0e34fa..7b0118ba56e5e9d8d2c81dbc1a41400f0a4850c5 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/EventDeserializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import java.util.Map;
 import org.apache.kafka.common.serialization.Deserializer;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HashMapSupplier.java
similarity index 88%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HashMapSupplier.java
index ec240bf8cb925aa3a444b56457da5adc411212b2..61910850bf3f66025866acb93d92b24b4b71d692 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HashMapSupplier.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import com.hazelcast.function.SupplierEx;
 import java.util.HashMap;
@@ -21,6 +21,4 @@ public class HashMapSupplier implements SupplierEx<HashMap<String, Set<String>>>
     return this.get();
   }
 
-
-
 }
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
index 419c25fec3eeffbd9eabef4897c44b7c6e773cee..97ea33eda56f34d5f1a2f8e5def8373c259540d0 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
@@ -1,74 +1,80 @@
 package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import io.confluent.kafka.serializers.KafkaAvroSerializer;
+import java.time.Duration;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.HazelcastJetService;
+import rocks.theodolite.benchmarks.commons.model.sensorregistry.ImmutableSensorRegistry;
+
 
 /**
  * A microservice that manages the history and, therefore, stores and aggregates incoming
  * measurements.
  */
-public class HistoryService {
+public class HistoryService extends HazelcastJetService {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
 
-  // Hazelcast settings (default)
-  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
-  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+  /**
+   * Constructs the use case logic for UC4. Retrieves the needed values and instantiates a pipeline
+   * factory.
+   */
+  public HistoryService() {
+    super(LOGGER);
+    final Properties kafkaProps =
+        this.propsBuilder.buildReadProperties(
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
 
-  // Kafka settings (default)
-  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
-  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
-  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
-  private static final String KAFKA_CONFIG_TOPIC_DEFAULT = "configuration";
-  private static final String KAFKA_FEEDBACK_TOPIC_DEFAULT = "aggregation-feedback";
-  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+    final Properties kafkaConfigReadProps =
+        this.propsBuilder.buildReadProperties(
+            EventDeserializer.class.getCanonicalName(),
+            StringDeserializer.class.getCanonicalName());
 
-  // UC4 specific (default)
-  private static final String WINDOW_SIZE_DEFAULT_MS = "5000";
+    final Properties kafkaAggregationReadProps =
+        this.propsBuilder.buildReadProperties(
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
 
-  // Job name (default)
-  private static final String JOB_NAME = "uc4-hazelcastjet";
+    final Properties kafkaWriteProps =
+        this.propsBuilder.buildWriteProperties(
+            StringSerializer.class.getCanonicalName(),
+            KafkaAvroSerializer.class.getCanonicalName());
 
-  /**
-   * Entrypoint for UC4 using Gradle Run.
-   */
-  public static void main(final String[] args) {
-    final HistoryService uc4HistoryService = new HistoryService();
-    try {
-      uc4HistoryService.run();
-    } catch (final Exception e) { // NOPMD
-      LOGGER.error("ABORT MISSION!: {}", e);
-    }
-  }
+    final String outputTopic = this.config.getString(Uc4ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
 
-  /**
-   * Start a UC4 service.
-   *
-   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  public void run() throws Exception { // NOPMD
-    this.createHazelcastJetApplication();
+    final String configurationTopic =
+        this.config.getString(Uc4ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC);
+
+    final String feedbackTopic = this.config.getString(Uc4ConfigurationKeys.KAFKA_FEEDBACK_TOPIC);
+
+    final Duration windowSize = Duration.ofMillis(
+        this.config.getInt(Uc4ConfigurationKeys.EMIT_PERIOD_MS));
+
+    this.pipelineFactory = new Uc4PipelineFactory(
+        kafkaProps,
+        kafkaConfigReadProps,
+        kafkaAggregationReadProps,
+        kafkaWriteProps,
+        this.kafkaInputTopic, outputTopic, configurationTopic, feedbackTopic,
+        windowSize);
   }
 
-  /**
-   * Creates a Hazelcast Jet Application for UC4 using the Uc1HazelcastJetFactory.
-   *
-   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
-   *         Detailed data is provided once an Exception occurs.
-   */
-  private void createHazelcastJetApplication() throws Exception { // NOPMD
-    new Uc4HazelcastJetFactory()
-        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT,JOB_NAME)
-        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
-        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
-        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
-        .setKafkaConfigurationTopicFromEnv(KAFKA_CONFIG_TOPIC_DEFAULT)
-        .setKafkaFeedbackTopicFromEnv(KAFKA_FEEDBACK_TOPIC_DEFAULT)
-        .setWindowSizeFromEnv(WINDOW_SIZE_DEFAULT_MS)
-        .buildUc4JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
-        .buildUc4Pipeline()
-        .runUc4Job(JOB_NAME);
+  @Override
+  protected void registerSerializer() {
+    this.jobConfig.registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+        .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+        .registerSerializer(ImmutableSensorRegistry.class,
+            ImmutableSensorRegistryUc4Serializer.class);
   }
 
+
+  public static void main(final String[] args) {
+    new HistoryService().run();
+  }
 }
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ImmutableSensorRegistryUc4Serializer.java
similarity index 94%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ImmutableSensorRegistryUc4Serializer.java
index 84e007dde7fb3a075a605bacfbbda05f206c2ee4..5b55162e5c026e0ca4dc69e51c5a0332a00aae6d 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ImmutableSensorRegistryUc4Serializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import com.hazelcast.nio.ObjectDataInput;
 import com.hazelcast.nio.ObjectDataOutput;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKey.java
similarity index 93%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKey.java
index 24114cc90a709c99e74495714559c12324e07788..33fca91e70c0cb826fe79f700264ab3733f5ca82 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKey.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import java.util.Objects;
 
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKeySerializer.java
similarity index 91%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKeySerializer.java
index 12a46b9d8f91ea145f614654a6ce9813b9014290..28d427a4527f1777335ffb3ebfc7eb0781a2c15f 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/SensorGroupKeySerializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import com.hazelcast.nio.ObjectDataInput;
 import com.hazelcast.nio.ObjectDataOutput;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4ConfigurationKeys.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..6c4c63396208cafc68a83835f29609b8582370ca
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4ConfigurationKeys.java
@@ -0,0 +1,17 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+/**
+ * Configuration Keys used for Hazelcast Jet Benchmark implementations.
+ */
+public class Uc4ConfigurationKeys {
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
+
+  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
+
+  public static final String EMIT_PERIOD_MS = "emit.period.ms";
+  // public static final String GRACE_PERIOD_MS = "grace.period.ms";
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
deleted file mode 100644
index 69ccee100f2946237390c8b19ff0f20036237d37..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
+++ /dev/null
@@ -1,379 +0,0 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet;
-
-import com.hazelcast.jet.JetInstance;
-import com.hazelcast.jet.config.JobConfig;
-import com.hazelcast.jet.pipeline.Pipeline;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import io.confluent.kafka.serializers.KafkaAvroSerializer;
-import java.util.Objects;
-import java.util.Properties;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.slf4j.Logger;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
-import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
-import rocks.theodolite.benchmarks.commons.model.sensorregistry.ImmutableSensorRegistry;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.EventDeserializer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
-
-/**
- * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC4
- * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
- * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
- * the Read and Write Properties and set the input, output, and configuration topic. This can be
- * done using internal functions of this factory. Outside data only refers to custom values or
- * default values in case data of the environment cannot the fetched.
- */
-public class Uc4HazelcastJetFactory {
-
-  // Information per History Service
-  private Properties kafkaInputReadPropsForPipeline;
-  private Properties kafkaConfigPropsForPipeline;
-  private Properties kafkaFeedbackPropsForPipeline;
-  private Properties kafkaWritePropsForPipeline;
-  private String kafkaInputTopic;
-  private String kafkaOutputTopic;
-  private JetInstance uc4JetInstance;
-  private Pipeline uc4JetPipeline;
-  // UC4 specific
-  private String kafkaConfigurationTopic;
-  private String kafkaFeedbackTopic;
-  private int windowSize;
-
-  /////////////////////////////////////
-  // Layer 1 - Hazelcast Jet Run Job //
-  /////////////////////////////////////
-
-  /**
-   * Needs a JetInstance and Pipeline defined in this factors. Adds the pipeline to the existing
-   * JetInstance as a job.
-   *
-   * @param jobName The name of the job.
-   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be startet.
-   */
-  public void runUc4Job(final String jobName) throws IllegalStateException { // NOPMD
-    Objects.requireNonNull(this.uc4JetInstance, "Jet instance is not set.");
-    Objects.requireNonNull(this.uc4JetPipeline, "Jet pipeline is not set.");
-
-    // Adds the job name and joins a job to the JetInstance defined in this factory
-    final JobConfig jobConfig = new JobConfig()
-        .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
-        .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
-        .registerSerializer(ImmutableSensorRegistry.class,
-            ImmutableSensorRegistryUc4Serializer.class)
-        .setName(jobName);
-    this.uc4JetInstance.newJobIfAbsent(this.uc4JetPipeline, jobConfig).join();
-  }
-
-  /////////////
-  // Layer 2 //
-  /////////////
-
-  /**
-   * Build a Hazelcast JetInstance used to run a job on.
-   *
-   * @param logger The logger specified for this JetInstance.
-   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
-   *        environment.
-   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
-   * @return A Uc4HazelcastJetFactory containing a set JetInstance.
-   */
-  public Uc4HazelcastJetFactory buildUc4JetInstanceFromEnv(final Logger logger,
-      final String bootstrapServerDefault,
-      final String hzKubernetesServiceDnsKey) {
-    this.uc4JetInstance = new JetInstanceBuilder()
-        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
-        .build();
-    return this;
-  }
-
-  /**
-   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
-   * topic and kafka properties defined in this factory beforehand.
-   *
-   * @return A Uc4HazelcastJetFactory containg a set pipeline.
-   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
-   *         cannot be built.
-   */
-  public Uc4HazelcastJetFactory buildUc4Pipeline() throws IllegalStateException { // NOPMD
-
-    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
-
-    // Check if Properties for the Kafka Input are set.
-    if (this.kafkaInputReadPropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Input Read Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if Properties for the Kafka Output are set.
-    if (this.kafkaWritePropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if Properties for the Kafka Config Read are set.
-    if (this.kafkaConfigPropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Config Read Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if Properties for the Kafka Feedback Read are set.
-    if (this.kafkaFeedbackPropsForPipeline == null) {
-      throw new IllegalStateException("Kafka Feedback Read Properties for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka input topic is set.
-    if (this.kafkaInputTopic == null) {
-      throw new IllegalStateException("Kafka input topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka output topic is set.
-    if (this.kafkaOutputTopic == null) {
-      throw new IllegalStateException("kafka output topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka config topic is set.
-    if (this.kafkaConfigurationTopic == null) {
-      throw new IllegalStateException("configuratin topic for pipeline not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if the Kafka feedback topic is set.
-    if (this.kafkaFeedbackTopic == null) {
-      throw new IllegalStateException("Feedback topic not set! "
-          + defaultPipelineWarning);
-    }
-
-    // Check if window size for tumbling window is set.
-    if (this.windowSize <= 0) {
-      throw new IllegalStateException("window size for pipeline not set or not greater than 0! "
-          + defaultPipelineWarning);
-    }
-
-    // Build Pipeline Using the pipelineBuilder
-    final Uc4PipelineBuilder pipeBuilder = new Uc4PipelineBuilder();
-    this.uc4JetPipeline =
-        pipeBuilder.build(this.kafkaInputReadPropsForPipeline,
-            this.kafkaConfigPropsForPipeline,
-            this.kafkaFeedbackPropsForPipeline,
-            this.kafkaWritePropsForPipeline,
-            this.kafkaInputTopic, this.kafkaOutputTopic,
-            this.kafkaConfigurationTopic,
-            this.kafkaFeedbackTopic,
-            this.windowSize);
-    // Return Uc4HazelcastJetBuilder factory
-    return this;
-  }
-
-  /////////////
-  // Layer 3 //
-  /////////////
-
-  /**
-   * Sets kafka read properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
-   *        url can be fetched from the environment.
-   * @return The Uc4HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
-   */
-  public Uc4HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault,
-      final String schemaRegistryUrlDefault,
-      final String jobName) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-
-    final Properties kafkaInputReadProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault, jobName,
-            StringDeserializer.class.getCanonicalName(),
-            KafkaAvroDeserializer.class.getCanonicalName());
-
-    final Properties kafkaConfigReadProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            jobName,
-            EventDeserializer.class.getCanonicalName(),
-            StringDeserializer.class.getCanonicalName());
-
-    final Properties kafkaAggregationReadProps =
-        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            jobName,
-            StringDeserializer.class.getCanonicalName(),
-            KafkaAvroDeserializer.class.getCanonicalName());
-
-    this.kafkaInputReadPropsForPipeline = kafkaInputReadProps;
-    this.kafkaConfigPropsForPipeline = kafkaConfigReadProps;
-    this.kafkaFeedbackPropsForPipeline = kafkaAggregationReadProps;
-    return this;
-  }
-
-  /**
-   * Sets kafka write properties for pipeline used in this builder using environment variables.
-   *
-   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
-   *        can be fetched from the environment.
-   * @return The Uc4HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
-   */
-  public Uc4HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
-      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
-    // Use KafkaPropertiesBuilder to build a properties object used for kafka
-    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
-    final Properties kafkaWriteProps =
-        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
-            schemaRegistryUrlDefault,
-            StringSerializer.class.getCanonicalName(),
-            KafkaAvroSerializer.class.getCanonicalName());
-    this.kafkaWritePropsForPipeline = kafkaWriteProps;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder.
-   *
-   * @param inputTopic The kafka topic used as the pipeline input.
-   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc4HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
-      final String inputTopic) {
-    this.kafkaInputTopic = inputTopic;
-    return this;
-  }
-
-  /**
-   * Sets the kafka input output for the pipeline used in this builder.
-   *
-   * @param outputTopic The kafka topic used as the pipeline output.
-   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc4HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
-    this.kafkaOutputTopic = outputTopic;
-    return this;
-  }
-
-
-  /**
-   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
-   */
-  public Uc4HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
-      final String defaultInputTopic) {
-    this.kafkaInputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
-        defaultInputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
-   *
-   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
-   *        environment.
-   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
-   */
-  public Uc4HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
-      final String defaultOutputTopic) {
-    this.kafkaOutputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
-        defaultOutputTopic);
-    return this;
-  }
-
-  /**
-   * Sets the window size for the pipeline used in this builder.
-   *
-   * @param windowSize the window size to be used for this pipeline.
-   * @return A Uc4HazelcastJetFactory with a set windowSize.
-   */
-  public Uc4HazelcastJetFactory setCustomWindowSize(// NOPMD
-      final int windowSize) {
-    this.windowSize = windowSize;
-    return this;
-  }
-
-  /**
-   * Sets the window size for the pipeline used in this builder from the environment.
-   *
-   * @param defaultWindowSize the default window size to be used for this pipeline when none is set
-   *        in the environment.
-   * @return A Uc4HazelcastJetFactory with a set windowSize.
-   */
-  public Uc4HazelcastJetFactory setWindowSizeFromEnv(// NOPMD
-      final String defaultWindowSize) {
-    final String windowSize = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.WINDOW_SIZE_UC4),
-        defaultWindowSize);
-    final int windowSizeNumber = Integer.parseInt(windowSize);
-    this.windowSize = windowSizeNumber;
-    return this;
-  }
-
-  /**
-   * Sets the configuration topic for the pipeline used in this builder.
-   *
-   * @param kafkaConfigurationTopic the configuration topic to be used for this pipeline.
-   * @return A Uc4HazelcastJetFactory with a set configuration topic.
-   */
-  public Uc4HazelcastJetFactory setCustomKafkaConfigurationTopic(// NOPMD
-      final String kafkaConfigurationTopic) {
-    this.kafkaConfigurationTopic = kafkaConfigurationTopic;
-    return this;
-  }
-
-  /**
-   * Sets the configuration topic for the pipeline used in this builder from the environment.
-   *
-   * @param defaultKafkaConfigurationTopic the default configuration topic to be used for this
-   *        pipeline when none is set in the environment.
-   * @return A Uc4HazelcastJetFactory with a set kafkaConfigurationTopic.
-   */
-  public Uc4HazelcastJetFactory setKafkaConfigurationTopicFromEnv(// NOPMD
-      final String defaultKafkaConfigurationTopic) {
-    this.kafkaConfigurationTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC),
-        defaultKafkaConfigurationTopic);
-    return this;
-  }
-
-  /**
-   * Sets the Feedback topic for the pipeline used in this builder.
-   *
-   * @param kafkaFeedbackTopic the Feedback topic to be used for this pipeline.
-   * @return A Uc4HazelcastJetFactory with a set Feedback topic.
-   */
-  public Uc4HazelcastJetFactory setCustomKafkaFeedbackTopic(// NOPMD
-      final String kafkaFeedbackTopic) {
-    this.kafkaFeedbackTopic = kafkaFeedbackTopic;
-    return this;
-  }
-
-  /**
-   * Sets the Feedback topic for the pipeline used in this builder from the environment.
-   *
-   * @param defaultKafkaFeedbackTopic the default Feedback topic to be used for this pipeline when
-   *        none is set in the environment.
-   * @return A Uc4HazelcastJetFactory with a set kafkaFeedbackTopic.
-   */
-  public Uc4HazelcastJetFactory setKafkaFeedbackTopicFromEnv(// NOPMD
-      final String defaultKafkaFeedbackTopic) {
-    this.kafkaFeedbackTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC),
-        defaultKafkaFeedbackTopic);
-    return this;
-  }
-
-}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineFactory.java
similarity index 81%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineFactory.java
index 87d10bc5c0fd417082c12c5a1a26bbf9116c8a50..59b5941fb9f0090074869b00d49ad26c68e40165 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineFactory.java
@@ -15,6 +15,7 @@ import com.hazelcast.jet.pipeline.StreamSource;
 import com.hazelcast.jet.pipeline.StreamStage;
 import com.hazelcast.jet.pipeline.StreamStageWithKey;
 import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -22,26 +23,29 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.configuration.events.Event;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.PipelineFactory;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 import rocks.theodolite.benchmarks.commons.model.records.AggregatedActivePowerRecord;
 import rocks.theodolite.benchmarks.commons.model.sensorregistry.SensorRegistry;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.AggregatedActivePowerRecordAccumulator;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ChildParentsTransformer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+
 
 /**
- * Builder to build a HazelcastJet Pipeline for UC4 which can be used for stream processing using
- * Hazelcast Jet.
+ * PipelineFactory for use case 4. Allows building and extending pipelines.
  */
-public class Uc4PipelineBuilder {
+public class Uc4PipelineFactory extends PipelineFactory {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(Uc4PipelineBuilder.class);
   private static final String SENSOR_PARENT_MAP_NAME = "SensorParentMap";
 
+  private final Properties kafkaConfigPropsForPipeline;
+  private final Properties kafkaFeedbackPropsForPipeline;
+
+  private final String kafkaConfigurationTopic;
+  private final String kafkaFeedbackTopic;
+
+  private final Duration emitPeriod;
+
+
   /**
    * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
    *
@@ -58,10 +62,8 @@ public class Uc4PipelineBuilder {
    * @param kafkaConfigurationTopic The name of the configuration topic used for the pipeline.
    * @param kafkaFeedbackTopic The name of the feedback topic used for the pipeline.
    * @param windowSize The window size of the tumbling window used in this pipeline.
-   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
-   *         for UC3.
    */
-  public Pipeline build(final Properties kafkaInputReadPropsForPipeline, // NOPMD
+  public Uc4PipelineFactory(final Properties kafkaInputReadPropsForPipeline, // NOPMD
       final Properties kafkaConfigPropsForPipeline,
       final Properties kafkaFeedbackPropsForPipeline,
       final Properties kafkaWritePropsForPipeline,
@@ -69,47 +71,55 @@ public class Uc4PipelineBuilder {
       final String kafkaOutputTopic,
       final String kafkaConfigurationTopic,
       final String kafkaFeedbackTopic,
-      final int windowSize) {
-
-    if (LOGGER.isInfoEnabled()) {
-      LOGGER.info("kafkaConfigProps: " + kafkaConfigPropsForPipeline);
-      LOGGER.info("kafkaFeedbackProps: " + kafkaFeedbackPropsForPipeline);
-      LOGGER.info("kafkaWriteProps: " + kafkaWritePropsForPipeline);
-    }
+      final Duration windowSize) {
+
+    super(kafkaInputReadPropsForPipeline, kafkaInputTopic,
+        kafkaWritePropsForPipeline, kafkaOutputTopic);
+    this.kafkaConfigPropsForPipeline = kafkaConfigPropsForPipeline;
+    this.kafkaFeedbackPropsForPipeline = kafkaFeedbackPropsForPipeline;
+    this.kafkaConfigurationTopic = kafkaConfigurationTopic;
+    this.kafkaFeedbackTopic = kafkaFeedbackTopic;
+    this.emitPeriod = windowSize;
+  }
 
-    // The pipeline for this Use Case
-    final Pipeline uc4Pipeline = Pipeline.create();
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @return a pipeline used which can be used in a Hazelcast Jet Instance to process data for UC4.
+   */
+  @Override
+  public Pipeline buildPipeline() {
 
     // Sources for this use case
     final StreamSource<Entry<Event, String>> configSource =
-        KafkaSources.kafka(kafkaConfigPropsForPipeline, kafkaConfigurationTopic);
+        KafkaSources.kafka(this.kafkaConfigPropsForPipeline, this.kafkaConfigurationTopic);
 
     final StreamSource<Entry<String, ActivePowerRecord>> inputSource =
-        KafkaSources.kafka(kafkaInputReadPropsForPipeline, kafkaInputTopic);
+        KafkaSources.kafka(this.kafkaReadPropsForPipeline, this.kafkaInputTopic);
 
     final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource =
-        KafkaSources.kafka(kafkaFeedbackPropsForPipeline, kafkaFeedbackTopic);
+        KafkaSources.kafka(this.kafkaFeedbackPropsForPipeline, this.kafkaFeedbackTopic);
 
     // Extend UC4 topology to pipeline
     final StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Aggregation =
-        this.extendUc4Topology(uc4Pipeline, inputSource, aggregationSource, configSource,
-            windowSize);
+        this.extendUc4Topology(inputSource, aggregationSource, configSource);
 
     // Add Sink2: Write back to kafka feedback/aggregation topic
     uc4Aggregation.writeTo(KafkaSinks.kafka(
-        kafkaWritePropsForPipeline, kafkaFeedbackTopic));
+        this.kafkaWritePropsForPipeline, this.kafkaFeedbackTopic));
 
     // Log aggregation product
-    uc4Aggregation.writeTo(Sinks.logger());
+    // uc4Aggregation.writeTo(Sinks.logger());
 
     // Add Sink2: Write back to kafka output topic
     uc4Aggregation.writeTo(KafkaSinks.kafka(
-        kafkaWritePropsForPipeline, kafkaOutputTopic));
+        this.kafkaWritePropsForPipeline, this.kafkaOutputTopic));
 
     // Return the pipeline
-    return uc4Pipeline;
+    return this.pipe;
   }
 
+
   /**
    * Extends to a blank Hazelcast Jet Pipeline the UC4 topology defines by theodolite.
    *
@@ -130,24 +140,21 @@ public class Uc4PipelineBuilder {
    * (6) Aggregate data over the window
    * </p>
    *
-   * @param pipe The blank pipeline to extend the logic to.
    * @param inputSource A streaming source with {@code ActivePowerRecord} data.
    * @param aggregationSource A streaming source with aggregated data.
    * @param configurationSource A streaming source delivering a {@code SensorRegistry}.
-   * @param windowSize The window size used to aggregate over.
    * @return A {@code StreamSource<String,Double>} with sensorKeys or groupKeys mapped to their
    *         according aggregated values. The data can be further modified or directly be linked to
    *         a Hazelcast Jet sink.
    */
-  public StreamStage<Entry<String, AggregatedActivePowerRecord>> extendUc4Topology(// NOPMD
-      final Pipeline pipe,
-      final StreamSource<Entry<String, ActivePowerRecord>> inputSource,
-      final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource,
-      final StreamSource<Entry<Event, String>> configurationSource, final int windowSize) {
+  public StreamStage<Map.Entry<String, AggregatedActivePowerRecord>> extendUc4Topology(// NOPMD
+      final StreamSource<Map.Entry<String, ActivePowerRecord>> inputSource,
+      final StreamSource<Map.Entry<String, AggregatedActivePowerRecord>> aggregationSource,
+      final StreamSource<Map.Entry<Event, String>> configurationSource) {
 
     //////////////////////////////////
     // (1) Configuration Stream
-    pipe.readFrom(configurationSource)
+    this.pipe.readFrom(configurationSource)
         .withNativeTimestamps(0)
         .filter(entry -> entry.getKey() == Event.SENSOR_REGISTRY_CHANGED
             || entry.getKey() == Event.SENSOR_REGISTRY_STATUS)
@@ -160,13 +167,13 @@ public class Uc4PipelineBuilder {
 
     //////////////////////////////////
     // (1) Sensor Input Stream
-    final StreamStage<Entry<String, ActivePowerRecord>> inputStream = pipe
+    final StreamStage<Entry<String, ActivePowerRecord>> inputStream = this.pipe
         .readFrom(inputSource)
         .withNativeTimestamps(0);
 
     //////////////////////////////////
     // (1) Aggregation Stream
-    final StreamStage<Entry<String, ActivePowerRecord>> aggregations = pipe
+    final StreamStage<Entry<String, ActivePowerRecord>> aggregations = this.pipe
         .readFrom(aggregationSource)
         .withNativeTimestamps(0)
         .map(entry -> { // Map Aggregated to ActivePowerRecord
@@ -224,8 +231,7 @@ public class Uc4PipelineBuilder {
     // (5) UC4 Last Value Map
     // Table with tumbling window differentiation [ (sensorKey,Group) , value ],Time
     final StageWithWindow<Entry<SensorGroupKey, ActivePowerRecord>> windowedLastValues =
-        dupliAsFlatmappedStage
-            .window(WindowDefinition.tumbling(windowSize));
+        dupliAsFlatmappedStage.window(WindowDefinition.tumbling(this.emitPeriod.toMillis()));
 
     final AggregateOperation1<Entry<SensorGroupKey, ActivePowerRecord>, AggregatedActivePowerRecordAccumulator, AggregatedActivePowerRecord> aggrOp = // NOCS
         AggregateOperation
@@ -251,7 +257,6 @@ public class Uc4PipelineBuilder {
   }
 
 
-
   /**
    * FlatMap function used to process the configuration input for UC4.
    */
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroup.java
similarity index 95%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroup.java
index b5f5fc7cb2822667dcaa26560fa83b2da3a513d9..1d6cb0b948007f0d03ea8e7515e0535521735020 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroup.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import java.util.Objects;
 import java.util.Set;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroupSerializer.java
similarity index 93%
rename from theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
rename to theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroupSerializer.java
index e136d1da0cd8362fed4f76807e7f8725c2075b7f..3e3bd69277a0c6becb02f9213a93bec0bd25972f 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/ValueGroupSerializer.java
@@ -1,4 +1,4 @@
-package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
 
 import com.hazelcast.nio.ObjectDataInput;
 import com.hazelcast.nio.ObjectDataOutput;
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
index e3371cc87e20e85e6e8c327955537e6e49dab86e..af877044b6e17665b6a18af41ec72ab6cedf0f91 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -1,8 +1,13 @@
-application.name=theodolite-uc1-application
+application.name=theodolite-uc4-application
 application.version=0.0.1
 
 kafka.bootstrap.servers=localhost:9092
 kafka.input.topic=input
+kafka.output.topic=output
+kafka.configuration.topic=configuration
+kafka.feedback.topic=aggregation-feedback
 
 schema.registry.url=http://localhost:8081
 
+emit.period.ms=5000
+#grace.period.ms=0
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
index e59f4cabc5acb09943f1f53dcb881dae001ffc26..29a561d1bd039f70b2540014f970a03094418532 100644
--- a/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
@@ -13,15 +13,19 @@ import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
 import com.hazelcast.jet.pipeline.test.Assertions;
 import com.hazelcast.jet.pipeline.test.TestSources;
 import com.hazelcast.jet.test.SerialTest;
+import java.time.Duration;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
+import java.util.Properties;
 import java.util.concurrent.CompletionException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import rocks.theodolite.benchmarks.commons.configuration.events.Event;
 import rocks.theodolite.benchmarks.commons.model.records.ActivePowerRecord;
 import rocks.theodolite.benchmarks.commons.model.records.AggregatedActivePowerRecord;
@@ -29,16 +33,13 @@ import rocks.theodolite.benchmarks.commons.model.sensorregistry.ImmutableSensorR
 import rocks.theodolite.benchmarks.commons.model.sensorregistry.MachineSensor;
 import rocks.theodolite.benchmarks.commons.model.sensorregistry.MutableAggregatedSensor;
 import rocks.theodolite.benchmarks.commons.model.sensorregistry.MutableSensorRegistry;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
-import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
 
 
 @Category(SerialTest.class)
 public class Uc4PipelineTest extends JetTestSupport {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(Uc4PipelineTest.class);
+
   JetInstance testInstance = null;
   Pipeline testPipeline = null;
   StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Topology = null;
@@ -52,7 +53,8 @@ public class Uc4PipelineTest extends JetTestSupport {
     final String testLevel1GroupName = "TEST-LEVEL1-GROUP";
     final String testLevel2GroupName = "TEST-LEVEL2-GROUP";
     final Double testValueInW = 10.0;
-    final int testWindowSize = 5000; // As window size is bugged, not necessary.
+    // NOTE: windowing is currently bugged, so this window size value has no effect.
+    final Duration testWindowSize = Duration.ofMillis(5000);
 
     // Create mocked Hazelcast Jet instance with configuration
     final String testClusterName = randomName();
@@ -115,12 +117,16 @@ public class Uc4PipelineTest extends JetTestSupport {
         });
 
     // Create pipeline to test
-    final Uc4PipelineBuilder pipelineBuilder = new Uc4PipelineBuilder();
-    this.testPipeline = Pipeline.create();
-    this.uc4Topology = pipelineBuilder.extendUc4Topology(this.testPipeline,
-        testInputSource, testAggregationSource, testConfigSource, testWindowSize);
+    final Properties properties = new Properties();
+    final Uc4PipelineFactory factory = new Uc4PipelineFactory(
+        properties, properties, properties, properties, "", "",
+        "", "", testWindowSize);
 
+    this.uc4Topology =
+        factory.extendUc4Topology(testInputSource, testAggregationSource, testConfigSource);
     this.uc4Topology.writeTo(Sinks.logger());
+
+    this.testPipeline = factory.getPipe();
   }
 
   /**
@@ -129,8 +135,6 @@ public class Uc4PipelineTest extends JetTestSupport {
   @Test
   public void testOutput() {
 
-    // System.out.println("DEBUG DEBUG DEBUG || ENTERED TEST 1");
-
     // Assertion Configuration
     final int timeout = 20;
     final String testSensorName = "TEST-SENSOR";
@@ -153,11 +157,11 @@ public class Uc4PipelineTest extends JetTestSupport {
 
 
           if (collection != null) {
-            System.out.println("Collection size: " + collection.size());
+            LOGGER.info("Collection size: " + collection.size());
 
 
             for (final Entry<String, AggregatedActivePowerRecord> entry : collection) {
-              System.out.println("DEBUG || " + entry.toString());
+              LOGGER.info("Entry || " + entry.toString());
 
               final String key = entry.getKey();
               final AggregatedActivePowerRecord agg = entry.getValue();
@@ -184,10 +188,10 @@ public class Uc4PipelineTest extends JetTestSupport {
             allOkay = testLevel1contained && testLevel2contained && averageEqTest && avOk;
           }
 
-          System.out.println("testLevel1contained: " + testLevel1contained);
-          System.out.println("testLevel2contained: " + testLevel2contained);
-          System.out.println("averageEqTest: " + averageEqTest);
-          System.out.println("avOk: " + avOk);
+          LOGGER.info("Test item from Level1 contained: " + testLevel1contained);
+          LOGGER.info("Test item from Level2 contained: " + testLevel2contained);
+          LOGGER.info("Average watt value equals test watt value: " + averageEqTest);
+          LOGGER.info("Average calculation correct: " + avOk);
 
           Assert.assertTrue("Assertion did not complete!", allOkay);
 
@@ -201,6 +205,8 @@ public class Uc4PipelineTest extends JetTestSupport {
           .registerSerializer(ImmutableSensorRegistry.class,
               ImmutableSensorRegistryUc4Serializer.class);
       this.testInstance.newJob(this.testPipeline, jobConfig).join();
+      Assert.fail(
+          "Job should have completed with an AssertionCompletedException, but completed normally");
 
     } catch (final CompletionException e) {
       final String errorMsg = e.getCause().getMessage();
@@ -209,14 +215,14 @@ public class Uc4PipelineTest extends JetTestSupport {
               + e.getCause(),
           errorMsg.contains(AssertionCompletedException.class.getName()));
     } catch (final Exception e) {
-      System.out.println("ERRORORORO TEST BROKEN !!!!");
-      System.out.println(e);
+      LOGGER.error("Test is broken", e);
     }
   }
 
 
   @After
   public void after() {
+    LOGGER.info("Shutting down the test instances");
     // Shuts down all running Jet Instances
     Jet.shutdownAll();
   }
diff --git a/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/AggregationService.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/AggregationService.java
index 26ea02957fb013c61c4ee0c3e2f280b0b9b8c993..4119fbefa3fb98b1573b757285cca688210a5bcc 100644
--- a/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/AggregationService.java
+++ b/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/AggregationService.java
@@ -5,7 +5,6 @@ import java.util.concurrent.CompletableFuture;
 import org.apache.commons.configuration2.Configuration;
 import org.apache.kafka.streams.KafkaStreams;
 import rocks.theodolite.benchmarks.commons.commons.configuration.ServiceConfigurations;
-import rocks.theodolite.benchmarks.commons.kstreams.ConfigurationKeys;
 
 /**
  * A microservice that manages the history and, therefore, stores and aggregates incoming
@@ -37,11 +36,11 @@ public class AggregationService {
   private void createKafkaStreamsApplication() {
     final Uc4KafkaStreamsBuilder uc4KafkaStreamsBuilder = new Uc4KafkaStreamsBuilder(this.config);
     uc4KafkaStreamsBuilder
-        .feedbackTopic(this.config.getString(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC))
-        .outputTopic(this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
-        .configurationTopic(this.config.getString(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC))
-        .emitPeriod(Duration.ofMillis(this.config.getLong(ConfigurationKeys.EMIT_PERIOD_MS)))
-        .gracePeriod(Duration.ofMillis(this.config.getLong(ConfigurationKeys.GRACE_PERIOD_MS)));
+        .feedbackTopic(this.config.getString(Uc4ConfigurationKeys.KAFKA_FEEDBACK_TOPIC))
+        .outputTopic(this.config.getString(Uc4ConfigurationKeys.KAFKA_OUTPUT_TOPIC))
+        .configurationTopic(this.config.getString(Uc4ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC))
+        .emitPeriod(Duration.ofMillis(this.config.getLong(Uc4ConfigurationKeys.EMIT_PERIOD_MS)))
+        .gracePeriod(Duration.ofMillis(this.config.getLong(Uc4ConfigurationKeys.GRACE_PERIOD_MS)));
 
     final KafkaStreams kafkaStreams = uc4KafkaStreamsBuilder.build();
 
diff --git a/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/Uc4ConfigurationKeys.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/Uc4ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..dc5a62bfccd63df6a58a53123528775a65358cc5
--- /dev/null
+++ b/theodolite-benchmarks/uc4-kstreams/src/main/java/rocks/theodolite/benchmarks/uc4/kstreams/Uc4ConfigurationKeys.java
@@ -0,0 +1,20 @@
+package rocks.theodolite.benchmarks.uc4.kstreams;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class Uc4ConfigurationKeys {
+
+  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
+
+  public static final String EMIT_PERIOD_MS = "emit.period.ms";
+
+  public static final String GRACE_PERIOD_MS = "grace.period.ms";
+
+  private Uc4ConfigurationKeys() {}
+
+}
diff --git a/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties
index a21f7e917e3ce4a0762261ca90444613c82ab650..8a1d86ae3ecc419badae62d62c102ec8fafb4730 100644
--- a/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties
+++ b/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties
@@ -10,4 +10,7 @@ kafka.output.topic=output
 schema.registry.url=http://localhost:8081
 
 emit.period.ms=5000
-grace.period.ms=0
\ No newline at end of file
+grace.period.ms=0
+
+# Kafka Streams Config
+commit.interval.ms=5000
diff --git a/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/ConfigMapYamlPatcher.kt b/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/ConfigMapYamlPatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..272e3785e274eecbb85e3618113ae5137d5314bc
--- /dev/null
+++ b/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/ConfigMapYamlPatcher.kt
@@ -0,0 +1,38 @@
+package rocks.theodolite.kubernetes.patcher
+
+import io.fabric8.kubernetes.api.model.ConfigMap
+import io.fabric8.kubernetes.api.model.HasMetadata
+import org.yaml.snakeyaml.DumperOptions
+import org.yaml.snakeyaml.Yaml
+
+/**
+ * The ConfigMapYamlPatcher allows adding or modifying a key-value pair in a YAML file of a ConfigMap.
+ *
+ * @property fileName Name of the YAML file in the ConfigMap that should be modified.
+ * @property variableName Name of the YAML key whose value should be patched.
+ */
+class ConfigMapYamlPatcher(
+    private val fileName: String,
+    private val variableName: String
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is ConfigMap) {
+            val yamlFile = resource.data[fileName]
+
+            // Configure YAML parser
+            val dumperOptions = DumperOptions()
+            // used to omit curly braces around and new lines for every property
+            dumperOptions.defaultFlowStyle = DumperOptions.FlowStyle.BLOCK
+            val parser = Yaml(dumperOptions)
+
+            // Change value
+            val yaml = parser.loadAs(yamlFile, LinkedHashMap<String, String>()::class.java)
+            yaml[variableName] = value
+
+            // Convert back to String and set in Kubernetes resource
+            resource.data[fileName] = parser.dump(yaml)
+        }
+        return resource
+    }
+}
diff --git a/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/PatcherFactory.kt b/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/PatcherFactory.kt
index b5513b5b796c4ab8119db2a2da17a7cbe7231e57..69a2528bbcd2a39d535d77571f7ca520b8c81488 100644
--- a/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/PatcherFactory.kt
+++ b/theodolite/src/main/kotlin/rocks/theodolite/kubernetes/patcher/PatcherFactory.kt
@@ -73,6 +73,10 @@ class PatcherFactory {
                     "ImagePatcher" -> ImagePatcher(
                         container = patcherDefinition.properties["container"]!!
                     )
+                    "ConfigMapYamlPatcher" -> ConfigMapYamlPatcher(
+                        fileName = patcherDefinition.properties["fileName"]!!,
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
                     "NamePatcher" -> NamePatcher()
                     "ServiceSelectorPatcher" -> ServiceSelectorPatcher(
                         variableName = patcherDefinition.properties["label"]!!
diff --git a/theodolite/src/test/resources/cpu-deployment.yaml b/theodolite/src/test/resources/cpu-deployment.yaml
index 9845648949babd260192e6c6fa652db976c04288..4ed00ba8b7b0c11481379c259b9b3a26e8eedef8 100644
--- a/theodolite/src/test/resources/cpu-deployment.yaml
+++ b/theodolite/src/test/resources/cpu-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://my-confluent-cp-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               cpu: 1000m
diff --git a/theodolite/src/test/resources/cpu-memory-deployment.yaml b/theodolite/src/test/resources/cpu-memory-deployment.yaml
index eaae989abb1f3b4fa44f032eee700181fb75e48e..c6231f3508b6fa580f70f33a39f1afab55072866 100644
--- a/theodolite/src/test/resources/cpu-memory-deployment.yaml
+++ b/theodolite/src/test/resources/cpu-memory-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://my-confluent-cp-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite/src/test/resources/memory-deployment.yaml b/theodolite/src/test/resources/memory-deployment.yaml
index 7af278b8c6b2efd13adbcc77e2db5a7b4c4478ad..57283feabb3613d5f248bb8a91c42a3e38b6aa82 100644
--- a/theodolite/src/test/resources/memory-deployment.yaml
+++ b/theodolite/src/test/resources/memory-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://my-confluent-cp-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
           resources:
             limits:
               memory: 4Gi
diff --git a/theodolite/src/test/resources/no-resources-deployment.yaml b/theodolite/src/test/resources/no-resources-deployment.yaml
index 0687a3e042575951ec903492589101c122406f7f..1ace4851ef731758b1edf445077e295684bccdc0 100644
--- a/theodolite/src/test/resources/no-resources-deployment.yaml
+++ b/theodolite/src/test/resources/no-resources-deployment.yaml
@@ -26,8 +26,6 @@ spec:
               value: "http://my-confluent-cp-schema-registry:8081"
             - name: JAVA_OPTS
               value: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=5555"
-            - name: COMMIT_INTERVAL_MS # Set as default for the applications
-              value: "100"
         - name: prometheus-jmx-exporter
           image: "solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143"
           command: