diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a86f4124cb15cd60c990589139569f1b066d7d89..0892e983c645823e825e27619dae71e54685ffea 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,7 @@
 workflow:
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"'
+    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_PROTECTED != "true"'
       when: never
     - when: always
 
@@ -10,6 +10,7 @@ stages:
   - test
   - check
   - deploy
+  - smoketest
 
 default:
   tags:
@@ -20,9 +21,9 @@ default:
     - exec-dind
   # see https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#tls-enabled
   # for image usage and settings for building with TLS and docker in docker
-  image: docker:19.03.1
+  image: docker:20.10.12
   services:
-    - docker:19.03.1-dind
+    - docker:20.10.12-dind
   variables:
     DOCKER_TLS_CERTDIR: "/certs"
 
@@ -33,12 +34,22 @@ default:
   script:
     - mkdir -p /kaniko/.docker
     - echo "{\"auths\":{\"${CR_HOST}\":{\"auth\":\"$(printf "%s:%s" "${CR_USER}" "${CR_PW}" | base64 | tr -d '\n')\"}}}" > /kaniko/.docker/config.json
-    - DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//')
-    - "[ ! $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:${DOCKER_TAG_NAME}latest\""
-    - "[ ! $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$DOCKER_TAG_NAME$CI_COMMIT_SHORT_SHA\""
-    - "[ $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$CI_COMMIT_TAG\""
+    - >
+      if [ $IMAGE_TAG ]; then
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$IMAGE_TAG"
+      elif [ $CI_COMMIT_TAG ]; then
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$CI_COMMIT_TAG"
+      else
+        DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//')
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:${DOCKER_TAG_NAME}latest"
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$DOCKER_TAG_NAME$CI_COMMIT_SHORT_SHA"
+      fi
     - "[ $DOCKERFILE ] && KANIKO_DOCKERFILE=\"--dockerfile $DOCKERFILE\""
     - /kaniko/executor --context `pwd`/$CONTEXT $KANIKO_DOCKERFILE $KANIKO_D
+    - echo "PUBLISHED_IMAGE_TAG=${CI_COMMIT_TAG-$CI_COMMIT_SHORT_SHA}" >> build.env
+  artifacts:
+    reports:
+      dotenv: build.env
 
 
 # Theodolite Docs
@@ -71,28 +82,39 @@ test-docs-links:
   extends: .docs
   needs:
     - build-docs
-  script: bundle exec htmlproofer --assume-extension --allow_hash_href --url-ignore "/favicon.ico" ./_site
+  script: bundle exec htmlproofer --assume-extension --allow_hash_href ./_site
+
+build-docs-crds:
+  stage: build
+  image:
+    name: ghcr.io/fybrik/crdoc:0.6.1
+    entrypoint: [""]
+  script: /crdoc --resources theodolite/crd/ --template docs/api-reference/crds.tmpl --output docs/api-reference/crds.ref.md
+  artifacts:
+    paths:
+      - docs/api-reference/crds.ref.md
+    expire_in: 1 week
+  rules:
+    - changes:
+      - docs/api-reference/crds.tmpl
+      - theodolite/crd/**/*
+    - when: manual
+      allow_failure: true
 
 test-docs-crds-regression:
   stage: test
-  image: golang
+  needs:
+    - build-docs-crds
+  image: alpine:3.15
   before_script:
     - cd docs
-    - go install fybrik.io/crdoc@latest
   script:
-    - crdoc --resources ../theodolite/crd/ --template api-reference/crds.tmpl  --output api-reference/crds.ref.md
     - cmp api-reference/crds.md api-reference/crds.ref.md
   artifacts:
     when: on_failure
     paths:
       - docs/api-reference/crds.ref.md
     expire_in: 1 week
-  rules:
-    - changes:
-      - docs/api-reference/crds.tmpl
-      - theodolite/crd/**/*
-    - when: manual
-      allow_failure: true
 
 
 # Theodolite Helm Chart
@@ -104,6 +126,11 @@ lint-helm:
     name: alpine/helm:3.5.2
     entrypoint: [""]
   script: helm lint helm/
+  rules:
+  - changes:
+    - helm/*
+  - when: manual
+    allow_failure: true
 
 
 # Theodolite Benchmarks
@@ -378,7 +405,174 @@ deploy-uc4-load-generator:
     IMAGE_NAME: "theodolite-uc4-workload-generator"
     JAVA_PROJECT_NAME: "uc4-load-generator"
     JAVA_PROJECT_DEPS: "load-generator-commons"
-      
+
+deploy-http-bridge:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-http-bridge"
+    JAVA_PROJECT_NAME: "http-bridge"
+    JAVA_PROJECT_DEPS: "load-generator-commons"
+
+.smoketest-benchmarks:
+  stage: smoketest
+  extends:
+    - .dind
+  image: ghcr.io/cau-se/theodolite-build-docker-compose-jq:20.10.12
+  before_script:
+    - cd theodolite-benchmarks/docker-test
+  # variables:
+  #   TEST_LOG_FILE: "test.log"
+  script:
+    - export THEODOLITE_TAG=$PUBLISHED_IMAGE_TAG
+    - ./smoketest-runner.sh ./$DOCKER_COMPOSE_DIR
+    # - cat test.log
+  after_script:
+    - cd ./$DOCKER_COMPOSE_DIR
+    - docker-compose down
+  rules:
+    - changes:
+      - theodolite-benchmarks/*
+      - theodolite-benchmarks/{$JAVA_PROJECT_DEPS}/**/*
+      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $DOCKER_COMPOSE_DIR && $JAVA_PROJECT_DEPS"
+    - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $DOCKER_COMPOSE_DIR && $JAVA_PROJECT_DEPS"
+      when: manual
+      allow_failure: true
+
+smoketest-uc1-kstreams:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-kstreams
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-kstreams"
+    JAVA_PROJECT_DEPS: "uc1-kstreams,kstreams-commons,uc1-load-generator,load-generator-commons"
+
+smoketest-uc1-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-flink
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-flink"
+    JAVA_PROJECT_DEPS: "uc1-flink,flink-commons,uc1-load-generator,load-generator-commons"
+
+smoketest-uc1-beam-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-beam-flink
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-beam-flink"
+    JAVA_PROJECT_DEPS: "uc1-beam-flink,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
+
+smoketest-uc1-beam-samza:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-beam-samza
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-beam-samza"
+    JAVA_PROJECT_DEPS: "uc1-beam-samza,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
+
+smoketest-uc2-kstreams:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-kstreams
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-kstreams"
+    JAVA_PROJECT_DEPS: "uc2-kstreams,kstreams-commons,uc2-load-generator,load-generator-commons"
+
+smoketest-uc2-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-flink
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-flink"
+    JAVA_PROJECT_DEPS: "uc2-flink,flink-commons,uc2-load-generator,load-generator-commons"
+
+smoketest-uc2-beam-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-beam-flink
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-beam-flink"
+    JAVA_PROJECT_DEPS: "uc2-beam-flink,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
+
+smoketest-uc2-beam-samza:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-beam-samza
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-beam-samza"
+    JAVA_PROJECT_DEPS: "uc2-beam-samza,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
+
+smoketest-uc3-kstreams:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-kstreams
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-kstreams"
+    JAVA_PROJECT_DEPS: "uc3-kstreams,kstreams-commons,uc3-load-generator,load-generator-commons"
+
+smoketest-uc3-beam-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-beam-flink
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-beam-flink"
+    JAVA_PROJECT_DEPS: "uc3-beam-flink,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
+
+smoketest-uc3-beam-samza:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-beam-samza
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-beam-samza"
+    JAVA_PROJECT_DEPS: "uc3-beam-samza,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
+
+smoketest-uc4-kstreams:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-kstreams
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-kstreams"
+    JAVA_PROJECT_DEPS: "uc4-kstreams,kstreams-commons,uc4-load-generator,load-generator-commons"
+
+smoketest-uc4-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-flink
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-flink"
+    JAVA_PROJECT_DEPS: "uc4-flink,flink-commons,uc4-load-generator,load-generator-commons"
+
+smoketest-uc4-beam-flink:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-beam-flink
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-beam-flink"
+    JAVA_PROJECT_DEPS: "uc4-beam-flink,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"
+
+smoketest-uc4-beam-samza:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-beam-samza
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-beam-samza"
+    JAVA_PROJECT_DEPS: "uc4-beam-samza,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"
+
 
 # Theodolite Framework
 
@@ -395,6 +589,11 @@ deploy-uc4-load-generator:
   before_script:
     - export GRADLE_USER_HOME=`pwd`/.gradle
     - cd theodolite
+  rules:
+    - changes:
+      - theodolite/**/*
+    - when: manual
+      allow_failure: true
 
 build-theodolite-jvm:
   stage: build
@@ -595,4 +794,22 @@ deploy-random-scheduler:
     - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW"
       when: manual
       allow_failure: true
-      
\ No newline at end of file
+
+deploy-buildimage-docker-compose-jq:
+  stage: deploy
+  extends:
+    - .kaniko-push
+  needs: []
+  variables:
+    DOCKER_VERSION: 20.10.12
+    IMAGE_NAME: theodolite-build-docker-compose-jq
+    IMAGE_TAG: $DOCKER_VERSION
+  before_script:
+    - cd buildimages/docker-compose-jq
+  rules:
+    - changes:
+      - buildimages/docker-compose-jq/Dockerfile
+      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW"
+    - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $CI_PIPELINE_SOURCE == 'web'"
+      when: manual
+      allow_failure: true
diff --git a/CITATION.cff b/CITATION.cff
index 07c2dcee319f73604f95414b987f8ed5274f7e82..04640de442f4458b09e11ce3d2939c850f594556 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -8,7 +8,7 @@ authors:
     given-names: Wilhelm
     orcid: "https://orcid.org/0000-0001-6625-4335"
 title: Theodolite
-version: "0.6.1"
+version: "0.6.3"
 repository-code: "https://github.com/cau-se/theodolite"
 license: "Apache-2.0"
 doi: "10.1016/j.bdr.2021.100209"
diff --git a/buildimages/docker-compose-jq/Dockerfile b/buildimages/docker-compose-jq/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..cd3f60ba3e75ab5767acff788c3bb69c8640cd4c
--- /dev/null
+++ b/buildimages/docker-compose-jq/Dockerfile
@@ -0,0 +1,7 @@
+ARG DOCKER_VERSION
+FROM docker:${DOCKER_VERSION:-latest}
+
+RUN apk update && \
+    apk add jq && \
+    apk add py-pip python3-dev libffi-dev openssl-dev gcc libc-dev rust cargo make && \
+    pip install docker-compose
diff --git a/codemeta.json b/codemeta.json
index 2a190092b96adb3462c011e49db3c160d639d6fe..832b570681afb143978698fd47dad5d2835c700b 100644
--- a/codemeta.json
+++ b/codemeta.json
@@ -5,10 +5,10 @@
     "codeRepository": "https://github.com/cau-se/theodolite",
     "dateCreated": "2020-03-13",
     "datePublished": "2020-07-27",
-    "dateModified": "2022-01-17",
+    "dateModified": "2022-01-24",
     "downloadUrl": "https://github.com/cau-se/theodolite/releases",
     "name": "Theodolite",
-    "version": "0.6.1",
+    "version": "0.6.3",
     "description": "Theodolite is a framework for benchmarking the horizontal and vertical scalability of cloud-native applications.",
     "developmentStatus": "active",
     "relatedLink": [
diff --git a/docs/README.md b/docs/README.md
index 52b5311295e5a96721d9aa42f7e9c319da06960c..a19f94305dfdcb1de7c46da98afbb52b28a6bfa0 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -39,5 +39,5 @@ crdoc --resources ../theodolite/crd/ --template api-reference/crds.tmpl  --outpu
 With the following command, crdoc is executed in Docker:
 
 ```sh
-docker run --rm -v "`pwd`/../theodolite/crd/":/crd -u $UID -v "`pwd`/api-reference":/api-reference ghcr.io/fybrik/crdoc:0.6.0 --resources /crd/ --template /api-reference/crds.tmpl --output /api-reference/crds.md
+docker run --rm -v "`pwd`/../theodolite/crd/":/crd -v "`pwd`/api-reference":/api-reference ghcr.io/fybrik/crdoc:0.6.1 --resources /crd/ --template /api-reference/crds.tmpl --output /api-reference/crds.md
 ```
diff --git a/docs/api-reference/crds.md b/docs/api-reference/crds.md
index 0d7e46e3a72aea642fdc629f1abb664a4f8b93f3..fb3f02ac941870dd085d06027d972e6003c7aadb 100644
--- a/docs/api-reference/crds.md
+++ b/docs/api-reference/crds.md
@@ -94,13 +94,6 @@ Resource Types:
         </tr>
     </thead>
     <tbody><tr>
-        <td><b><a href="#benchmarkspeckafkaconfig">kafkaConfig</a></b></td>
-        <td>object</td>
-        <td>
-          Contains the Kafka configuration.<br/>
-        </td>
-        <td>true</td>
-      </tr><tr>
         <td><b><a href="#benchmarkspecloadgenerator">loadGenerator</a></b></td>
         <td>object</td>
         <td>
@@ -138,103 +131,20 @@ Resource Types:
         </td>
         <td>false</td>
       </tr><tr>
-        <td><b>name</b></td>
-        <td>string</td>
+        <td><b><a href="#benchmarkspeckafkaconfig">kafkaConfig</a></b></td>
+        <td>object</td>
         <td>
-          This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.<br/>
-          <br/>
-            <i>Default</i>: <br/>
+          Contains the Kafka configuration.<br/>
         </td>
         <td>false</td>
-      </tr></tbody>
-</table>
-
-
-### benchmark.spec.kafkaConfig
-<sup><sup>[↩ Parent](#benchmarkspec)</sup></sup>
-
-
-
-Contains the Kafka configuration.
-
-<table>
-    <thead>
-        <tr>
-            <th>Name</th>
-            <th>Type</th>
-            <th>Description</th>
-            <th>Required</th>
-        </tr>
-    </thead>
-    <tbody><tr>
-        <td><b>bootstrapServer</b></td>
-        <td>string</td>
-        <td>
-          The bootstrap servers connection string.<br/>
-        </td>
-        <td>true</td>
       </tr><tr>
-        <td><b><a href="#benchmarkspeckafkaconfigtopicsindex">topics</a></b></td>
-        <td>[]object</td>
-        <td>
-          List of topics to be created for each experiment. Alternative theodolite offers the possibility to remove certain topics after each experiment.<br/>
-        </td>
-        <td>true</td>
-      </tr></tbody>
-</table>
-
-
-### benchmark.spec.kafkaConfig.topics[index]
-<sup><sup>[↩ Parent](#benchmarkspeckafkaconfig)</sup></sup>
-
-
-
-
-
-<table>
-    <thead>
-        <tr>
-            <th>Name</th>
-            <th>Type</th>
-            <th>Description</th>
-            <th>Required</th>
-        </tr>
-    </thead>
-    <tbody><tr>
         <td><b>name</b></td>
         <td>string</td>
         <td>
-          The name of the topic.<br/>
+          This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.<br/>
           <br/>
             <i>Default</i>: <br/>
         </td>
-        <td>true</td>
-      </tr><tr>
-        <td><b>numPartitions</b></td>
-        <td>integer</td>
-        <td>
-          The number of partitions of the topic.<br/>
-          <br/>
-            <i>Default</i>: 0<br/>
-        </td>
-        <td>false</td>
-      </tr><tr>
-        <td><b>removeOnly</b></td>
-        <td>boolean</td>
-        <td>
-          Determines if this topic should only be deleted after each experiement. For removeOnly topics the name can be a RegEx describing the topic.<br/>
-          <br/>
-            <i>Default</i>: false<br/>
-        </td>
-        <td>false</td>
-      </tr><tr>
-        <td><b>replicationFactor</b></td>
-        <td>integer</td>
-        <td>
-          The replication factor of the topic.<br/>
-          <br/>
-            <i>Default</i>: 0<br/>
-        </td>
         <td>false</td>
       </tr></tbody>
 </table>
@@ -1647,6 +1557,96 @@ The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.
 </table>
 
 
+### benchmark.spec.kafkaConfig
+<sup><sup>[↩ Parent](#benchmarkspec)</sup></sup>
+
+
+
+Contains the Kafka configuration.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>bootstrapServer</b></td>
+        <td>string</td>
+        <td>
+          The bootstrap servers connection string.<br/>
+        </td>
+        <td>true</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspeckafkaconfigtopicsindex">topics</a></b></td>
+        <td>[]object</td>
+        <td>
+          List of topics to be created for each experiment. Alternative theodolite offers the possibility to remove certain topics after each experiment.<br/>
+        </td>
+        <td>true</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.kafkaConfig.topics[index]
+<sup><sup>[↩ Parent](#benchmarkspeckafkaconfig)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>name</b></td>
+        <td>string</td>
+        <td>
+          The name of the topic.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>true</td>
+      </tr><tr>
+        <td><b>numPartitions</b></td>
+        <td>integer</td>
+        <td>
+          The number of partitions of the topic.<br/>
+          <br/>
+            <i>Default</i>: 0<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>removeOnly</b></td>
+        <td>boolean</td>
+        <td>
+          Determines if this topic should only be deleted after each experiement. For removeOnly topics the name can be a RegEx describing the topic.<br/>
+          <br/>
+            <i>Default</i>: false<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>replicationFactor</b></td>
+        <td>integer</td>
+        <td>
+          The replication factor of the topic.<br/>
+          <br/>
+            <i>Default</i>: 0<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
 ### benchmark.status
 <sup><sup>[↩ Parent](#benchmark)</sup></sup>
 
diff --git a/docs/api-reference/patchers.md b/docs/api-reference/patchers.md
index 77f937e38f6a0ee4084cb0ad5b5838718eabff10..bea63ccd23decef5654f257221ce0358b4f68e45 100644
--- a/docs/api-reference/patchers.md
+++ b/docs/api-reference/patchers.md
@@ -53,6 +53,27 @@ Patchers can be seen as functions which take a value as input and modify a Kuber
   * **resource**: "uc1-kstreams-deployment.yaml"
   * **example value**: "random-scheduler"
 
+* **LabelPatcher**: Changes the label of a Kubernetes Deployment or StatefulSet. The patched field is: `metadata.labels`
+  * **type**: "LabelPatcher"
+  * **resource**: "uc1-kstreams-deployment.yaml"
+  * **properties**:
+    * variableName: "app"
+  * **example value**: "theodolite-sut"
+
+* **MatchLabelPatcher**: Changes the match labels of a Kubernetes Deployment or StatefulSet. The patched field is: `spec.selector.matchLabels`
+  * **type**: "MatchLabelPatcher"
+  * **resource**: "uc1-kstreams-deployment.yaml"
+  * **properties**:
+    * variableName: "app"
+  * **example value**: "theodolite-sut"
+
+* **TemplateLabelPatcher**: Changes the template labels of a Kubernetes Deployment or StatefulSet. The patched field is: `spec.template.metadata.labels`
+  * **type**: "TemplateLabelPatcher"
+  * **resource**: "uc1-kstreams-deployment.yaml"
+  * **properties**:
+    * variableName: "app"
+  * **example value**: "theodolite-sut"
+
 * **ImagePatcher**: Changes the image of a Kubernetes resource. **Currently not fully implemented.**
   * **type**: "ImagePatcher"
   * **resource**: "uc1-kstreams-deployment.yaml"
diff --git a/docs/creating-a-benchmark.md b/docs/creating-a-benchmark.md
index 122f43b645c9702d16722a3061bfde8bec7c94c6..fde8ba0759407ddea8befc18e244784a9ba34c1f 100644
--- a/docs/creating-a-benchmark.md
+++ b/docs/creating-a-benchmark.md
@@ -108,13 +108,20 @@ Suppose the resources needed by your benchmark are defined as YAML files, locate
 Benchmarks need to specify at least one supported load and resource type for which scalability can be benchmarked.
 
 Load and resource types are described by a name (used for reference from an Execution) and a list of patchers.
+Patchers can be seen as functions, which take a value as input and modify a Kubernetes resource in a patcher-specific way. Examples of patchers are the *ReplicaPatcher*, which modifies the replica specification of a deployment, or the *EnvVarPatcher*, which modifies an environment variable.
+See the [patcher API reference](api-reference/patchers) for an overview of available patchers.
+
 If a benchmark is [executed by an Execution](running-benchmarks), these patchers are used to configure SUT and load generator according to the [load and resource values](creating-an-execution) set in the Execution.
 
 ## Kafka Configuration
 
-Theodolite allows to automatically create and remove Kafka topics for each SLO experiment.
-Use the `removeOnly: True` property for topics which are created automatically by the SUT.
-For those topics, also wildcards are allowed in the topic name.
+Theodolite allows to automatically create and remove Kafka topics for each SLO experiment by setting a `kafkaConfig`.
+Its `bootstrapServer` needs to point to your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
+For each topic, you configure its name, the number of partitions and the replication factor.
+
+With the `removeOnly: True` property, you can also instruct Theodolite to only remove topics and not create them.
+This is useful when benchmarking SUTs, which create topics on their own (e.g., Kafka Streams and Samza applications).
+For those topics, wildcards are also allowed in the topic name and, of course, no partition count or replication factor must be provided.
 
 
 <!-- Further information: API Reference -->
diff --git a/docs/favicon.ico b/docs/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..81062e21501bd98a29505433c1e3b43965f5c17d
Binary files /dev/null and b/docs/favicon.ico differ
diff --git a/docs/index.yaml b/docs/index.yaml
index 185ff1b0616b760c647a809006c48bf26c554490..509844ab0bc371d29302f90f69e769cd52a8e11b 100644
--- a/docs/index.yaml
+++ b/docs/index.yaml
@@ -1,6 +1,76 @@
 apiVersion: v1
 entries:
   theodolite:
+  - apiVersion: v2
+    appVersion: 0.6.3
+    created: "2022-01-24T13:40:40.07330713+01:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: kafka-lag-exporter.enabled
+      name: kafka-lag-exporter
+      repository: https://lightbend.github.io/kafka-lag-exporter/repo/
+      version: 0.6.7
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: ebf08e3bf084fcd96eb2ee0588d495258d1741c74019257e55ba40f574874525
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.6.3/theodolite-0.6.3.tgz
+    version: 0.6.3
+  - apiVersion: v2
+    appVersion: 0.6.2
+    created: "2022-01-23T22:31:04.773793557+01:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: kafka-lag-exporter.enabled
+      name: kafka-lag-exporter
+      repository: https://lightbend.github.io/kafka-lag-exporter/repo/
+      version: 0.6.7
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: f6514038741051230dc9be0a6bde3fbc6f92136ecb36c276343e98e550f2c6d0
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.6.2/theodolite-0.6.2.tgz
+    version: 0.6.2
   - apiVersion: v2
     appVersion: 0.6.1
     created: "2022-01-18T10:40:00.557347616+01:00"
@@ -176,4 +246,4 @@ entries:
     urls:
     - https://github.com/cau-se/theodolite/releases/download/v0.4.0/theodolite-0.4.0.tgz
     version: 0.4.0
-generated: "2022-01-18T10:40:00.486387187+01:00"
+generated: "2022-01-24T13:40:40.036786105+01:00"
diff --git a/docs/running-benchmarks.md b/docs/running-benchmarks.md
index 7da1c7e5f8385a2818ae587b4c3ab3715a6c2bb2..0a76316c0515233f9445b363f941d60ab7aa0e06 100644
--- a/docs/running-benchmarks.md
+++ b/docs/running-benchmarks.md
@@ -143,7 +143,7 @@ The easiest way to use them is at MyBinder:
 
 Alternatively, you can also [run these notebook locally](https://github.com/cau-se/theodolite/tree/master/analysis), for example, with Docker or Visual Studio Code.
 
-The notebooks allow to compute a scalability function using its *demand* metric and to visualize multiple such functions in plots:
+The notebooks allow to compute a scalability function using Theodolite's *demand* metric and to visualize multiple such functions in plots:
 
 ### Computing the *demand* metric with `demand-metric.ipynb` (optional)
 
diff --git a/docs/theodolite-benchmarks/index.md b/docs/theodolite-benchmarks/index.md
index 9b08e6f5f2fe049c17dce819b7c4d9b83fcbc12e..30b8e816ef1b48e770c8e42be1d599a71431c976 100644
--- a/docs/theodolite-benchmarks/index.md
+++ b/docs/theodolite-benchmarks/index.md
@@ -1,12 +1,12 @@
 ---
 title: Available Benchmarks
-has_children: false
+has_children: true
 nav_order: 7
 ---
 
 # Theodolite Benchmarks
 
-Theodolite comes with 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding load generator is provided. Currently, Theodolite provides benchmark implementations for Apache Kafka Streams and Apache Flink.
+Theodolite comes with 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding [load generator](load-generator) is provided. Currently, Theodolite provides benchmark implementations for Apache Kafka Streams and Apache Flink.
 
 
 Theodolite's benchmarks are based on typical use cases for stream processing within microservices. Specifically, all benchmarks represent some sort of microservice doing Industrial Internet of Things data analytics. 
diff --git a/docs/theodolite-benchmarks/load-generator.md b/docs/theodolite-benchmarks/load-generator.md
new file mode 100644
index 0000000000000000000000000000000000000000..17845c42d47e94a5b696dee1d774890de8d6fff1
--- /dev/null
+++ b/docs/theodolite-benchmarks/load-generator.md
@@ -0,0 +1,87 @@
+---
+title: Load Generators
+parent: Available Benchmarks
+has_children: false
+nav_order: 1
+---
+
+# Load Generator Framework
+
+Theodolite's benchmarks come with a flexible load generator framework. It is used to create load on the [4 Theodolite benchmarks](#prebuilt-container-images), but can also be applied to create [custom load generators](#creating-a-custom-load-generator).
+It is particularly designed for scalability: Just spin up multiple instances of the load generator and the instances automatically divide the load to be generated among themselves.
+
+## Prebuilt container images
+
+For each benchmark, we provide a [load generator as OCI container image](https://github.com/orgs/cau-se/packages?tab=packages&q=workload-generator). These load generators simulate smart power meters in an industrial facility, which generate measurement records at a fixed rate. Records are published to an Apache Kafka topic (default) or sent as POST requests to an HTTP endpoint.
+
+You can simply run a load generator container, for example, for benchmark UC1 with:
+
+```sh
+docker run -it ghcr.io/cau-se/theodolite-uc1-workload-generator
+```
+
+### Message format
+
+Messages generated by the load generators represent a single measurement of [active power](https://en.wikipedia.org/wiki/AC_power#Active,_reactive,_apparent,_and_complex_power_in_sinusoidal_steady-state). The corresponding message type is specified as [`ActivePowerRecords`](https://github.com/cau-se/titan-ccp-common/blob/master/src/main/avro/ActivePower.avdl)
+defined with Avro. It consists of an identifier for the simulated power sensor, a timestamp in epoch milliseconds and the actual measured (simulated) value in watts.
+
+When sending generated records via Apache Kafka, these records are serialized with the [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry).
+If the load generator is configured to send records as HTTP POST requests, records are serialized as JSON according to the following format:
+
+```json
+{
+  "identifier": "sensor-id",
+  "timestamp": 1645564942000,
+  "valueInW": 1234.56
+}
+```
+
+### Configuration
+
+The prebuilt container images can be configured with the following environment variables:
+
+| Environment Variable | Description | Default |
+|:----|:----|:----|
+| `BOOTSTRAP_SERVER` | Address (`hostname:port`) of another load generator instance to form a cluster with. Can also be this instance. | `localhost:5701` |
+| `KUBERNETES_DNS_NAME` | Kubernetes service name to discover other load generators to form a cluster with. Must be a fully qualified domain name (FQDN), e.g., something like `<service>.<namespace>.svc.cluster.local`. Requires `BOOTSTRAP_SERVER` not to be set. | |
+| `PORT` | Port used for coordination among load generator instances. | 5701 |
+| `PORT_AUTO_INCREMENT` | If set to true and the specified PORT is already used, use the next higher one. Useful if multiple instances should run on the same host, without configuring each instance individually. | true |
+| `CLUSTER_NAME_PREFIX` | Only required if unrelated load generators form a cluster. | theodolite-load-generation |
+| `TARGET` | The target system the load generator send messages to. Valid values are: `kafka`, `http`. | `kafka` |
+| `KAFKA_BOOTSTRAP_SERVERS` | A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. See [Kafka producer config: `bootstrap.servers`](https://kafka.apache.org/documentation/#producerconfigs_bootstrap.servers) for more information. Only used if Kafka is set as `TARGET`. | `localhost:9092` |
+| `KAFKA_INPUT_TOPIC` | Name of the Kafka topic, which should receive the generated messages. Only used if Kafka is set as `TARGET`. | input |
+| `SCHEMA_REGISTRY_URL` | URL of the [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry). | `http://localhost:8081` |
+| `KAFKA_BATCH_SIZE` | Value for the Kafka producer configuration: [`batch.size`](https://kafka.apache.org/documentation/#producerconfigs_batch.size). Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`batch.size`](https://kafka.apache.org/documentation/#producerconfigs_batch.size) |
+| `KAFKA_LINGER_MS` | Value for the Kafka producer configuration: [`linger.ms`](https://kafka.apache.org/documentation/#producerconfigs_linger.ms). Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`linger.ms`](https://kafka.apache.org/documentation/#producerconfigs_linger.ms) |
+| `KAFKA_BUFFER_MEMORY` | Value for the Kafka producer configuration: [`buffer.memory`](https://kafka.apache.org/documentation/#producerconfigs_buffer.memory) Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`buffer.memory`](https://kafka.apache.org/documentation/#producerconfigs_buffer.memory) |
+| `HTTP_URL` | The URL the load generator should post messages to. Only used if HTTP is set as `TARGET`. | |
+| `NUM_SENSORS` | The amount of simulated sensors. | 10 |
+| `PERIOD_MS` | The time in milliseconds between generating two messages for the same sensor. With our Theodolite benchmarks, we apply an [open workload model](https://www.usenix.org/legacy/event/nsdi06/tech/full_papers/schroeder/schroeder.pdf) in which new messages are generated at a fixed rate, without considering the think time of the target server nor the time required for generating a message. | 1000 |
+| `VALUE` | The constant `valueInW` of an `ActivePowerRecord`. | 10 |
+| `THREADS` | Number of worker threads used to generate the load. | 4 |
+
+Please note that there are some additional configuration options for benchmark [UC4's load generator](https://github.com/cau-se/theodolite/blob/master/theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java).
+
+## Creating a custom load generator
+
+To create a custom load generator, you need to import the [load-generator-commons](https://github.com/cau-se/theodolite/tree/master/theodolite-benchmarks/load-generator-commons) project. You can then create an instance of the `LoadGenerator` object and call its `run` method:
+
+```java
+LoadGenerator loadGenerator = new LoadGenerator()
+    .setClusterConfig(clusterConfig)
+    .setLoadDefinition(new WorkloadDefinition(
+        new KeySpace(key_prefix, numSensors),
+        duration))
+    .setGeneratorConfig(new LoadGeneratorConfig(
+        recordGenerator,
+        recordSender))
+    .withThreads(threads);
+loadGenerator.run();
+```
+
+Alternatively, you can also start with a load generator populated with a default configuration or created from environment variables and then adjust the `LoadGenerator` as desired:
+
+```java
+LoadGenerator loadGeneratorFromDefaults = LoadGenerator.fromDefaults();
+LoadGenerator loadGeneratorFromEnv = LoadGenerator.fromEnvironment();
+```
diff --git a/execution/theodolite.yaml b/execution/theodolite.yaml
index ae18a68ee61c71e20008a71537357cdf9521216a..495b98f8dfff7fb5ddfe95d71d09fc1dfff67e0e 100644
--- a/execution/theodolite.yaml
+++ b/execution/theodolite.yaml
@@ -21,17 +21,16 @@ spec:
               valueFrom:
                 fieldRef:
                   fieldPath: metadata.namespace
-
             # - name: MODE
             #   value: yaml-executor # Default is `yaml-executor`
             - name: THEODOLITE_EXECUTION
-              value: "execution/execution.yaml" # The name of this file must correspond to the filename of the execution, from which the config map is created.
+              value: "/deployments/execution/execution.yaml" # The name of this file must correspond to the filename of the execution, from which the config map is created.
             - name: THEODOLITE_BENCHMARK
-              value: "benchmark/benchmark.yaml" # The name of this file must correspond to the filename of the benchmark, from which the config map is created.
+              value: "/deployments/benchmark/benchmark.yaml" # The name of this file must correspond to the filename of the benchmark, from which the config map is created.
             - name: THEODOLITE_APP_RESOURCES
-              value: "benchmark-resources"
+              value: "/deployments/benchmark-resources"
             - name: RESULTS_FOLDER # Folder for saving results
-              value: results # Default is the pwd (/deployments)
+              value: /deployments/results # Default is the pwd (/deployments)
             # - name: CREATE_RESULTS_FOLDER # Specify whether the specified result folder should be created if it does not exist.
             #   value: "false" # Default is false.
           volumeMounts:
diff --git a/helm/templates/prometheus/datasource-config-map.yaml b/helm/templates/grafana/datasource-config-map.yaml
similarity index 100%
rename from helm/templates/prometheus/datasource-config-map.yaml
rename to helm/templates/grafana/datasource-config-map.yaml
diff --git a/helm/templates/prometheus/prometheus.yaml b/helm/templates/prometheus/prometheus.yaml
index 4e297b20290be9686b901fa8c76823136c6fabef..23a015250e19cc14550ce73e8162ba27f65be774 100644
--- a/helm/templates/prometheus/prometheus.yaml
+++ b/helm/templates/prometheus/prometheus.yaml
@@ -5,10 +5,7 @@ metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 spec:
   serviceAccountName: {{ template "theodolite.fullname" . }}-prometheus
-  serviceMonitorSelector:
-    matchLabels:
-      #app: cp-kafka
-      appScope: titan-ccp
+  serviceMonitorSelector: {}
   resources:
     requests:
       memory: 400Mi
diff --git a/helm/templates/theodolite/theodolite-operator.yaml b/helm/templates/theodolite/theodolite-operator.yaml
index ff9c7e4de87c703af3350f7d9c797a5a53e2e675..f2669686eada049d33c5c88169d8d2ec3af84261 100644
--- a/helm/templates/theodolite/theodolite-operator.yaml
+++ b/helm/templates/theodolite/theodolite-operator.yaml
@@ -27,11 +27,18 @@ spec:
             - name: MODE
               value: operator
             - name: RESULTS_FOLDER
-              value: "./results"
+              value: "/deployments/results"
           volumeMounts:
             - name: theodolite-results-volume
               mountPath: "/deployments/results"
-        {{- if .Values.operator.sloChecker.droppedRecordsKStreams.enabled }}
+          resources:
+            requests:
+              memory: "512Mi"
+              cpu: "250m"
+            limits:
+              memory: "1024Mi"
+              cpu: "500m"
+        {{- if .Values.operator.sloChecker.generic.enabled }}
         - name: slo-checker-generic
           image: "{{ .Values.operator.sloChecker.generic.image }}:{{ .Values.operator.sloChecker.generic.imageTag }}"
           imagePullPolicy: "{{ .Values.operator.sloChecker.generic.imagePullPolicy }}"
@@ -43,6 +50,13 @@ spec:
             value: "8082"
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.sloChecker.lagTrend.enabled }}
         - name: lag-trend-slo-checker
@@ -54,6 +68,13 @@ spec:
           env:
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.sloChecker.droppedRecordsKStreams.enabled }}
         - name: slo-checker-dropped-records-kstreams
@@ -67,6 +88,13 @@ spec:
             value: "8081"
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.resultsVolume.accessSidecar.enabled }}
         - name: results-access
diff --git a/slo-checker/dropped-records/Dockerfile b/slo-checker/dropped-records/Dockerfile
index 032b8153a6989ca04631ba553289dacb3620a38d..2cbc89a150217f15b3c4ba921050db720a34bf50 100644
--- a/slo-checker/dropped-records/Dockerfile
+++ b/slo-checker/dropped-records/Dockerfile
@@ -1,6 +1,15 @@
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
diff --git a/slo-checker/dropped-records/requirements.txt b/slo-checker/dropped-records/requirements.txt
index 8b6c3863226c2bd5e8bcd7982b2674dee593f192..a3d5ff675d6a89b2514f1936b1a8104d13ad9b55 100644
--- a/slo-checker/dropped-records/requirements.txt
+++ b/slo-checker/dropped-records/requirements.txt
@@ -1,5 +1,6 @@
-fastapi==0.65.2
-scikit-learn==0.20.3
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+#scikit-learn==0.22.2
+pandas==1.0.3
diff --git a/slo-checker/generic/Dockerfile b/slo-checker/generic/Dockerfile
index 032b8153a6989ca04631ba553289dacb3620a38d..2cbc89a150217f15b3c4ba921050db720a34bf50 100644
--- a/slo-checker/generic/Dockerfile
+++ b/slo-checker/generic/Dockerfile
@@ -1,6 +1,15 @@
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
diff --git a/slo-checker/generic/requirements.txt b/slo-checker/generic/requirements.txt
index 87972ab01a276cbb63033e214e1ad53d38b5c8d8..a3d5ff675d6a89b2514f1936b1a8104d13ad9b55 100644
--- a/slo-checker/generic/requirements.txt
+++ b/slo-checker/generic/requirements.txt
@@ -1,4 +1,6 @@
-fastapi==0.65.2
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+#scikit-learn==0.22.2
+pandas==1.0.3
diff --git a/slo-checker/record-lag/Dockerfile b/slo-checker/record-lag/Dockerfile
index 032b8153a6989ca04631ba553289dacb3620a38d..2cbc89a150217f15b3c4ba921050db720a34bf50 100644
--- a/slo-checker/record-lag/Dockerfile
+++ b/slo-checker/record-lag/Dockerfile
@@ -1,6 +1,15 @@
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
diff --git a/slo-checker/record-lag/requirements.txt b/slo-checker/record-lag/requirements.txt
index 8b6c3863226c2bd5e8bcd7982b2674dee593f192..770498e91e3f705e98868d009518b355a19a356a 100644
--- a/slo-checker/record-lag/requirements.txt
+++ b/slo-checker/record-lag/requirements.txt
@@ -1,5 +1,6 @@
-fastapi==0.65.2
-scikit-learn==0.20.3
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+scikit-learn==0.22.2
+pandas==1.0.3
diff --git a/theodolite-benchmarks/beam-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/beam-commons/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..60b9977149c7b281cb2ac91ee282f73d4351e348
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,127 @@
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.correct_indentation=true
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.organize_imports=true
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=;
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.correct_indentation=true
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.organize_imports=true
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
\ No newline at end of file
diff --git a/theodolite-benchmarks/beam-commons/build.gradle b/theodolite-benchmarks/beam-commons/build.gradle
index a809f6bc4b97d8d62b807243eddecda8a5de5032..64ac2bb51ae1e6d741749a81e5c6c9e296d14d68 100644
--- a/theodolite-benchmarks/beam-commons/build.gradle
+++ b/theodolite-benchmarks/beam-commons/build.gradle
@@ -13,21 +13,19 @@ repositories {
 }
 
 dependencies {
-  // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
   implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
   implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
-  implementation 'com.google.code.gson:gson:2.8.2'
-  implementation 'com.google.guava:guava:24.1-jre'
 
-  implementation('org.apache.beam:beam-sdks-java-io-kafka:2.22.0'){
+  implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.35.0'
+  implementation('org.apache.beam:beam-sdks-java-io-kafka:2.35.0'){
     exclude group: 'org.apache.kafka', module: 'kafka-clients'
   }
+  implementation('io.confluent:kafka-streams-avro-serde:5.3.2')
+
   implementation group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.30'
-  implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
 
   runtimeOnly 'org.slf4j:slf4j-api:1.7.32'
   runtimeOnly 'org.slf4j:slf4j-jdk14:1.7.32'
 
-  // Use JUnit test framework
   testImplementation 'junit:junit:4.12'
 }
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java
index c936ce918c10f3c500cdd26f7e057cd7b6c555b6..3f04bf4373aab0394ff4574b4020065ac356724b 100644
--- a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java
@@ -12,6 +12,9 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
  */
 public class AbstractPipeline extends Pipeline {
 
+  private static final String KAFKA_CONFIG_SPECIFIC_AVRO_READER = "specific.avro.reader"; // NOPMD
+  private static final String KAFKA_CONFIG_SCHEMA_REGISTRY_URL = "schema.registry.url"; // NOPMD
+
   protected final String inputTopic;
   protected final String bootstrapServer;
   // Application Configurations
@@ -21,8 +24,8 @@ public class AbstractPipeline extends Pipeline {
     super(options);
     this.config = config;
 
-    inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
-    bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+    this.inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
+    this.bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
   }
 
   /**
@@ -32,19 +35,37 @@ public class AbstractPipeline extends Pipeline {
    */
   public Map<String, Object> buildConsumerConfig() {
     final Map<String, Object> consumerConfig = new HashMap<>();
-    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
-        config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
-    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
-        config
-            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
-    consumerConfig.put("schema.registry.url",
-        config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
-
-    consumerConfig.put("specific.avro.reader",
-        config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
-
-    final String applicationName = config.getString(ConfigurationKeys.APPLICATION_NAME);
-    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, applicationName);
+    consumerConfig.put(
+        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+        this.config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
+    consumerConfig.put(
+        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        this.config.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+    consumerConfig.put(
+        KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
+        this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
+    consumerConfig.put(
+        KAFKA_CONFIG_SPECIFIC_AVRO_READER,
+        this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
+    consumerConfig.put(
+        ConsumerConfig.GROUP_ID_CONFIG,
+        this.config.getString(ConfigurationKeys.APPLICATION_NAME));
     return consumerConfig;
   }
+
+  /**
+   * Builds a simple configuration for a Kafka producer transformation.
+   *
+   * @return the built configuration.
+   */
+  public Map<String, Object> buildProducerConfig() {
+    final Map<String, Object> config = new HashMap<>();
+    config.put(
+        KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
+        this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
+    config.put(
+        KAFKA_CONFIG_SPECIFIC_AVRO_READER,
+        this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
+    return config;
+  }
 }
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/ActivePowerRecordDeserializer.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/ActivePowerRecordDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..c53dde3d5f4b7d18822c916a637c356b898fe2cd
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/ActivePowerRecordDeserializer.java
@@ -0,0 +1,11 @@
+package theodolite.commons.beam.kafka;
+
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
+import org.apache.kafka.common.serialization.Deserializer;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * A Kafka {@link Deserializer} for Schema Registry-encoded {@link ActivePowerRecord}s.
+ */
+public class ActivePowerRecordDeserializer extends SpecificAvroDeserializer<ActivePowerRecord> {
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java
deleted file mode 100644
index f102bee41d66c251ecb66418dd3b90dced32cffb..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package theodolite.commons.beam.kafka;
-
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import java.util.Map;
-import org.apache.beam.sdk.coders.AvroCoder;
-import org.apache.beam.sdk.io.kafka.KafkaIO;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PBegin;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import titan.ccp.model.records.ActivePowerRecord;
-
-/**
- * Simple {@link PTransform} that read from Kafka using {@link KafkaIO}.
- */
-public class KafkaActivePowerRecordReader extends
-    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
-
-  private static final long serialVersionUID = 2603286150183186115L;
-  private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
-
-
-  /**
-   * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
-   */
-  public KafkaActivePowerRecordReader(final String bootstrapServer, final String inputTopic,
-                                      final Map<String, Object> consumerConfig) {
-    super();
-
-    if (bootstrapServer == null) {
-      throw new IllegalArgumentException("bootstrapServer is null");
-    }
-
-    if (inputTopic == null) {
-      throw new IllegalArgumentException("inputTopic is null");
-    }
-
-    // Check if boostrap server and inputTopic are defined
-    if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
-      throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
-    }
-
-
-    reader =
-        KafkaIO.<String, ActivePowerRecord>read()
-            .withBootstrapServers(bootstrapServer)
-            .withTopic(inputTopic)
-            .withKeyDeserializer(StringDeserializer.class)
-            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
-                AvroCoder.of(ActivePowerRecord.class))
-            .withConsumerConfigUpdates(consumerConfig)
-            .withoutMetadata();
-  }
-
-  @Override
-  public PCollection<KV<String, ActivePowerRecord>> expand(final PBegin input) {
-    return input.apply(this.reader);
-  }
-
-}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java
index 732afe9a0c1d4bdfea876025fceea0c5da1310fe..7a48bd71d497f65351888425d092decf5adb05f3 100644
--- a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java
@@ -1,6 +1,5 @@
 package theodolite.commons.beam.kafka;
 
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import java.util.Map;
 import org.apache.beam.sdk.coders.AvroCoder;
 import org.apache.beam.sdk.io.kafka.KafkaIO;
@@ -12,40 +11,37 @@ import org.apache.kafka.common.serialization.StringDeserializer;
 import titan.ccp.model.records.ActivePowerRecord;
 
 /**
- * Simple {@link PTransform} that read from Kafka using {@link KafkaIO}.
- * Has additional a TimestampPolicy.
+ * Simple {@link PTransform} that reads from Kafka using {@link KafkaIO} with event time.
  */
-public class KafkaActivePowerTimestampReader extends
-    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
+public class KafkaActivePowerTimestampReader
+    extends PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
 
   private static final long serialVersionUID = 2603286150183186115L;
   private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
 
-
   /**
    * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
    */
-  public KafkaActivePowerTimestampReader(final String bootstrapServer, final String inputTopic,
-                                         final Map<String, Object> consumerConfig) {
+  public KafkaActivePowerTimestampReader(
+      final String bootstrapServer,
+      final String inputTopic,
+      final Map<String, Object> consumerConfig) {
     super();
 
-    // Check if boostrap server and inputTopic are defined
+    // Check if bootstrap server and inputTopic are defined
     if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
       throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
     }
 
-    reader =
-        KafkaIO.<String, ActivePowerRecord>read()
-            .withBootstrapServers(bootstrapServer)
-            .withTopic(inputTopic)
-            .withKeyDeserializer(StringDeserializer.class)
-            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
-                AvroCoder.of(ActivePowerRecord.class))
-            .withConsumerConfigUpdates(consumerConfig)
-            // Set TimeStampPolicy for event time
-            .withTimestampPolicyFactory(
-                (tp, previousWaterMark) -> new EventTimePolicy(previousWaterMark))
-            .withoutMetadata();
+    this.reader = KafkaIO.<String, ActivePowerRecord>read().withBootstrapServers(bootstrapServer)
+        .withTopic(inputTopic).withKeyDeserializer(StringDeserializer.class)
+        .withValueDeserializerAndCoder(
+            ActivePowerRecordDeserializer.class,
+            AvroCoder.of(ActivePowerRecord.class))
+        .withConsumerConfigUpdates(consumerConfig)
+        .withTimestampPolicyFactory(
+            (tp, previousWatermark) -> new EventTimePolicy(previousWatermark))
+        .withoutMetadata();
   }
 
   @Override
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java
index 0a3867e71479e36ce30a9f222dfd0a7d473bd209..6d33f6f01493c10a1eb6aca56dd309ae58ce4b8d 100644
--- a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java
@@ -1,5 +1,6 @@
 package theodolite.commons.beam.kafka;
 
+import java.util.Map;
 import org.apache.beam.sdk.io.kafka.KafkaIO;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.KV;
@@ -9,23 +10,35 @@ import org.apache.kafka.common.serialization.Serializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 
 /**
- * Wrapper for a Kafka writing Transformation
- * where the value type can be generic.
+ * Wrapper for a Kafka writing Transformation where the value type can be generic.
+ *
  * @param <T> type of the value.
  */
-public class KafkaWriterTransformation<T> extends
-    PTransform<PCollection<KV<String, T>>, PDone> {
+public class KafkaWriterTransformation<T> extends PTransform<PCollection<KV<String, T>>, PDone> {
 
   private static final long serialVersionUID = 3171423303843174723L;
   private final PTransform<PCollection<KV<String, T>>, PDone> writer;
 
   /**
-   * Creates a new kafka writer transformation.
+   * Creates a new Kafka writer transformation.
    */
-  public KafkaWriterTransformation(final String bootstrapServer, final String outputTopic,
-                                   final Class<? extends Serializer<T>> valueSerializer) {
+  public KafkaWriterTransformation(
+      final String bootstrapServer,
+      final String outputTopic,
+      final Class<? extends Serializer<T>> valueSerializer) {
+    this(bootstrapServer, outputTopic, valueSerializer, Map.of());
+  }
+
+  /**
+   * Creates a new Kafka writer transformation.
+   */
+  public KafkaWriterTransformation(
+      final String bootstrapServer,
+      final String outputTopic,
+      final Class<? extends Serializer<T>> valueSerializer,
+      final Map<String, Object> producerConfig) {
     super();
-    // Check if boostrap server and outputTopic are defined
+    // Check if bootstrap server and outputTopic are defined
     if (bootstrapServer.isEmpty() || outputTopic.isEmpty()) {
       throw new IllegalArgumentException("bootstrapServer or outputTopic missing");
     }
@@ -34,7 +47,8 @@ public class KafkaWriterTransformation<T> extends
         .withBootstrapServers(bootstrapServer)
         .withTopic(outputTopic)
         .withKeySerializer(StringSerializer.class)
-        .withValueSerializer(valueSerializer);
+        .withValueSerializer(valueSerializer)
+        .withProducerConfigUpdates(producerConfig);
 
   }
 
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle
index eb9bda1f84c4f20568fca1498462dff9082ea1fa..02bf925f8dc29456f9eeaddeef800d2edcf837f8 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle
@@ -3,6 +3,6 @@ plugins {
 }
 
 dependencies {
-    implementation group: 'org.apache.beam', name: 'beam-runners-flink-1.12', version: '2.27.0'
-    implementation group: 'org.apache.flink', name: 'flink-statebackend-rocksdb_2.11', version: '1.12.0'
+    implementation group: 'org.apache.beam', name: 'beam-runners-flink-1.13', version: '2.35.0'
+    implementation group: 'org.apache.flink', name: 'flink-statebackend-rocksdb_2.11', version: '1.13.0'
 }
\ No newline at end of file
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle
index 41d1ae4f2bdfa358aca3fca2b91ea2b57e4c3405..4611062f1b09ff2dbad02f93b9cc7f9920c32f5e 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle
@@ -18,7 +18,7 @@ repositories {
     }
 }
 
-def apacheBeamVersion =  '2.22.0' //'2.27.0' // '2.34.0'
+def apacheBeamVersion =  '2.35.0'
 
 dependencies {
     // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle
index 73e916ccc867b9b3316776192f0dab56fa0710f0..44c59317472686cae88d6992382ae081c9b64ace 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle
@@ -3,7 +3,8 @@ plugins {
 }
 
 dependencies {
-    implementation('org.apache.beam:beam-runners-samza:2.22.0') {
+    implementation('org.apache.beam:beam-runners-samza:2.35.0') {
         exclude group: 'org.apache.samza', module: 'samza-yarn_2.11'
     }
+    implementation 'org.apache.samza:samza-kafka_2.11:1.5.0'
 }
\ No newline at end of file
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
index f5e93dd88d2234f8a9b0d6fea880f47d652dccfa..258d1a82d002184fe96a9df19b7d99806da50d28 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
@@ -20,7 +20,7 @@ shadowJar {
 tasks.distZip.enabled = false
 
 ext {
-  flinkVersion = '1.12.2'
+  flinkVersion = '1.13.5'
   scalaBinaryVersion = '2.12'
 }
 
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
index da2d42176ac0ddc9a157f843e3268b37ac4397e2..112ac662798d5a1e41f146014dd95bdaaba3a264 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
@@ -22,7 +22,7 @@ dependencies {
     // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
     implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
     implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
-    implementation 'org.apache.kafka:kafka-streams:2.6.0' // enable TransformerSuppliers
+    implementation 'org.apache.kafka:kafka-streams:3.1.0'
     implementation 'com.google.code.gson:gson:2.8.2'
     implementation 'com.google.guava:guava:24.1-jre'
     implementation 'org.slf4j:slf4j-simple:1.7.25'
diff --git a/theodolite-benchmarks/config/spotbugs-exclude-filter.xml b/theodolite-benchmarks/config/spotbugs-exclude-filter.xml
index 38cd4b6914b3f0cbdeac2a22bf29ce0d7cf487ee..4bc9e048fb20e27feb793d4c6c398345cd599cff 100644
--- a/theodolite-benchmarks/config/spotbugs-exclude-filter.xml
+++ b/theodolite-benchmarks/config/spotbugs-exclude-filter.xml
@@ -5,4 +5,7 @@
     <Rank value="16" />
   </Match>
 
+  <!-- Temporarily disabled due to potential false positive reported in https://github.com/spotbugs/spotbugs/issues/1923. -->
+  <Bug code="NP" />
+
 </FindBugsFilter>
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/README.md b/theodolite-benchmarks/docker-test/README.md
index fd1e9bf4730f897273be45a022ad2adeae1b7e6e..5d7ca3f4ac470202579f154fe8f066a246c84d23 100644
--- a/theodolite-benchmarks/docker-test/README.md
+++ b/theodolite-benchmarks/docker-test/README.md
@@ -36,3 +36,19 @@ the host, for example, from the IDE or Gradle. In such cases, the following adju
 
 You can now connect to Kafka from your host system with bootstrap server `localhost:19092` and contact the Schema
 Registry via `localhost:8081`. **Pay attention to the Kafka port, which is *19092* instead of the default one *9092*.**
+
+## Running Smoke Tests
+
+The `smoketest-runner.sh` script can be used to run a simple test for a specific Docker Compose file. You can call it with
+
+```sh
+./smoketest-runner.sh <docker-compose-dir>
+```
+
+where `<docker-compose-dir>` is the directory of a Docker-Compose file, for example, `uc2-beam-samza`. The script exits with a zero exit code in case of success and a non-zero exit code otherwise.
+
+You can also run the set of all smoke tests with:
+
+```sh
+./smoketest-runner-all.sh
+```
diff --git a/theodolite-benchmarks/docker-test/smoketest-runner-all.sh b/theodolite-benchmarks/docker-test/smoketest-runner-all.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0129a485d98a90d453b284408b755986f64208de
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/smoketest-runner-all.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+find . -name 'test.sh' -type f -exec dirname {} \; |
+    sort |
+    xargs -I %s sh -c "./smoketest-runner.sh %s 1>&2; echo $?" |
+    sort |
+    awk 'BEGIN {count[0]=0; count[1]=0} {count[$1!=0]++} END {print count[0] " tests successful, " count[1] " test failed."; exit count[1]}'
diff --git a/theodolite-benchmarks/docker-test/smoketest-runner.sh b/theodolite-benchmarks/docker-test/smoketest-runner.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3167c90a567eed3cc2678c80c722dbd0b8684f1e
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/smoketest-runner.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+COMPOSE_FILE_PATH=$1
+echo "Run test for '$COMPOSE_FILE_PATH'."
+
+cd $COMPOSE_FILE_PATH
+docker-compose pull -q
+docker-compose up -d kafka zookeeper schema-registry
+sleep 30s
+docker-compose up -d
+sleep 5s
+docker-compose ps
+
+if test -f "./test.sh"; then
+    #timeout --foreground 3m ./test.sh
+    ./test.sh
+    RETURN=$?
+else
+    RETURN=$?
+    echo "test.sh does not exists for '$COMPOSE_FILE_PATH'." 
+fi
+if [ $RETURN -eq 0 ]; then
+    echo "Test for '$COMPOSE_FILE_PATH' has passed."
+else
+    echo "Test for '$COMPOSE_FILE_PATH' has failed."
+fi
+
+docker-compose down
+exit $RETURN
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml
index d8a7b946a9d5e407032ce02838b3ad02892eae73..69de6049aca50a6a8e10a9e709b38d322bc8cf61 100644
--- a/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10
   benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
       #ports:
       #  - "8080:8081"
       command: >
@@ -62,7 +67,7 @@ services:
         - schema-registry
         - kafka
   benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
       scale: 1
       command: taskmanager
       environment:
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-flink/test.sh b/theodolite-benchmarks/docker-test/uc1-beam-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ebbecd1c5336c5dd907db11b8c8c45924e5924a8
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-beam-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark start and produce some output
+docker-compose logs --tail 100 benchmark-taskmanager |
+    sed -n "s/^.*Key:\s\(\S*\), Value:\s\(\S*\).*$/\2/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
index 11cf0c345b417fdda7cedba2f9db1342d2b64634..2212c3b539045114f31760d605ad928e237ed924 100644
--- a/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
@@ -21,21 +21,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc1-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
     depends_on:
       - schema-registry
@@ -47,7 +52,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-samza/test.sh b/theodolite-benchmarks/docker-test/uc1-beam-samza/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ed17db3a44d5c4a8dacfbc956c2f36dd47503508
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-beam-samza/test.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark start and produce some output
+docker-compose logs --tail 100 benchmark |
+    sed -n "s/^.*Key:\s\(\S*\), Value:\s\(\S*\).*$/\2/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
+
+
diff --git a/theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-flink/docker-compose.yml
similarity index 78%
rename from theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc1-flink/docker-compose.yml
index 5a252f07e23205cf20390230ec956240ad2dc7a6..55e359665de56cb03a5049c299761cb07690df30 100755
--- a/theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-flink/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10
   benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
     #ports:
     #  - "8080:8081"
     command: standalone-job --job-classname theodolite.uc1.application.HistoryServiceFlinkJob
@@ -59,7 +64,7 @@ services:
       - schema-registry
       - kafka
   benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
     command: taskmanager
     environment:
       - |
diff --git a/theodolite-benchmarks/docker-test/uc1-flink/test.sh b/theodolite-benchmarks/docker-test/uc1-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7c7f11a94f42d56d91d383f27d58ad9a09a918e5
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark start and produce some output
+docker-compose logs --tail 100 benchmark-taskmanager |
+    sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
similarity index 79%
rename from theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
index 88ffadfcf3ce7e372fad1e3cbf28cc3aa847756d..c85ce305c2f1383a77d4c405d52089ec1d2b02a6 100755
--- a/theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,19 +19,24 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
     image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
     depends_on:
@@ -41,7 +46,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc1-kstreams/test.sh b/theodolite-benchmarks/docker-test/uc1-kstreams/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..da711e3fac1b1d664b1c8487687ceacdddce6efa
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-kstreams/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark start and produce some output
+docker-compose logs --tail 100 benchmark |
+    sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml
index f8bdfae935a55c8cb60e3fb22b19c471832ca9f4..79f6f040144d3c4da56d469095ad3ddce90026a2 100644
--- a/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10
   benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
       #ports:
       #  - "8080:8081"
       command: >
@@ -62,7 +67,7 @@ services:
         - schema-registry
         - kafka
   benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
       scale: 1
       command: taskmanager
       environment:
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-flink/test.sh b/theodolite-benchmarks/docker-test/uc2-beam-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3c33834b0b21bc5dbe4e6a7c3ff947121bb2ce71
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-beam-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
index 67a5997b66833e33696592285dffe24b03b3d210..cc6bc7a7112c35f11ce9cfd27d09aebe401c8c51 100644
--- a/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
@@ -21,21 +21,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc2-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc2-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
     depends_on:
       - schema-registry
@@ -47,7 +52,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-samza/test.sh b/theodolite-benchmarks/docker-test/uc2-beam-samza/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3c33834b0b21bc5dbe4e6a7c3ff947121bb2ce71
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-beam-samza/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-flink/docker-compose.yml
similarity index 76%
rename from theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc2-flink/docker-compose.yml
index f7047a7af7d0b613dd128b9d8d4d9fffd22b4692..1397c8ab0e7bb8d7ee67b7fa9c18143c404fd9ff 100755
--- a/theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-flink/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10  
   benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
     #ports:
     #  - "8080:8081"
     command: standalone-job --job-classname theodolite.uc2.application.HistoryServiceFlinkJob
@@ -59,7 +64,7 @@ services:
       - schema-registry
       - kafka
   benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
     command: taskmanager
     environment:
       - |
diff --git a/theodolite-benchmarks/docker-test/uc2-flink/test.sh b/theodolite-benchmarks/docker-test/uc2-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3c33834b0b21bc5dbe4e6a7c3ff947121bb2ce71
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
similarity index 74%
rename from theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
index 89f2633b390b08a3a18128e98f261cc264e2b41d..efdba90bef634bab76012316f67b0f9be9f79c77 100755
--- a/theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc2-kstreams/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -42,7 +47,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       KAFKA_WINDOW_DURATION_MINUTES: 60
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc2-kstreams/test.sh b/theodolite-benchmarks/docker-test/uc2-kstreams/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3c33834b0b21bc5dbe4e6a7c3ff947121bb2ce71
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-kstreams/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml
index 9a18ab364463a985b40cd691f6232b9b47ae412e..e68dc94d4185d789272279e8fa2e5d178ffdd14b 100644
--- a/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10
   benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
       #ports:
       #  - "8080:8081"
       command: >
@@ -64,7 +69,7 @@ services:
         - schema-registry
         - kafka
   benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
       scale: 1
       command: taskmanager
       environment:
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-flink/test.sh b/theodolite-benchmarks/docker-test/uc3-beam-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0612838a3973a302c9acb3bbfa6b8d59ea2596c5
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-beam-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
index a50b32bd8f78678d63f06688821d6dfb5f133138..bf120f31dbfda2384b314ba4a90a25362f37b6c2 100644
--- a/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
@@ -21,21 +21,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc3-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc3-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
     depends_on:
       - schema-registry
@@ -47,7 +52,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-samza/test.sh b/theodolite-benchmarks/docker-test/uc3-beam-samza/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0612838a3973a302c9acb3bbfa6b8d59ea2596c5
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-beam-samza/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-flink/docker-compose.yml
similarity index 76%
rename from theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc3-flink/docker-compose.yml
index c2b8d7ad436301138acdf8dfae1654e2feb9b9bb..0c83d8a25d19d4989f982d251893635ea1222cb0 100755
--- a/theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-flink/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -44,7 +49,7 @@ services:
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 10
   benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
     #ports:
     #  - "8080:8081"
     command: standalone-job --job-classname theodolite.uc3.application.HistoryServiceFlinkJob
@@ -59,7 +64,7 @@ services:
       - schema-registry
       - kafka
   benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
     command: taskmanager
     environment:
       - |
diff --git a/theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-kstreams/docker-compose.yml
similarity index 73%
rename from theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc3-kstreams/docker-compose.yml
index 65b0a3467e123a84d0e719d8702749ed33773aea..7da1fa3b420e2dfa0d6d357723583bc3a256823e 100755
--- a/theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc3-kstreams/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -19,21 +19,26 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc3-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc3-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -41,7 +46,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc3-kstreams/test.sh b/theodolite-benchmarks/docker-test/uc3-kstreams/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0612838a3973a302c9acb3bbfa6b8d59ea2596c5
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-kstreams/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml
index 5169ac551952f992d98c74f7d65d5378ecdcc2a5..2968b718eb06ade4d42bbe104e214d573881ac14 100644
--- a/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml
@@ -20,20 +20,25 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -43,9 +48,9 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 4
-      NUM_NESTED_GROUPS: 4
+      NUM_NESTED_GROUPS: 3
   benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:${THEODOLITE_TAG:-latest}
       #ports:
       #  - "8080:8081"
       command: >
@@ -66,7 +71,7 @@ services:
         - schema-registry
         - kafka
   benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:${THEODOLITE_TAG:-latest}
       scale: 1
       command: taskmanager
       environment:
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-flink/test.sh b/theodolite-benchmarks/docker-test/uc4-beam-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..6a4c6dbf4f583e7598baefae8f48136bb2113630
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-beam-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 2000 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b21\b"
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
index bded9d5d227d0f62cb6cb3f9edac3df383ea3e8a..d236af7d284ebb085c78110feb6001cb28d18290 100644
--- a/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
@@ -22,20 +22,25 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc4-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc4-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
     depends_on:
       - schema-registry
@@ -47,7 +52,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -57,4 +62,4 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 4
-      NUM_NESTED_GROUPS: 4
+      NUM_NESTED_GROUPS: 3
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-samza/test.sh b/theodolite-benchmarks/docker-test/uc4-beam-samza/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..6a4c6dbf4f583e7598baefae8f48136bb2113630
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-beam-samza/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 2000 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b21\b"
diff --git a/theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-flink/docker-compose.yml
similarity index 79%
rename from theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc4-flink/docker-compose.yml
index 0f7e4e656dede1aad3342fb79816e3ebf88e84d8..24e5acdf94d1ba3d5c2807d172ba054309a2ef4a 100755
--- a/theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-flink/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,20 +20,25 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -43,9 +48,9 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 4
-      NUM_NESTED_GROUPS: 4
+      NUM_NESTED_GROUPS: 3
   benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc4-flink:${THEODOLITE_TAG:-latest}
     #ports:
     #  - "8080:8081"
     command: standalone-job --job-classname theodolite.uc4.application.AggregationServiceFlinkJob
@@ -60,7 +65,7 @@ services:
       - schema-registry
       - kafka
   benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc4-flink:${THEODOLITE_TAG:-latest}
     command: taskmanager
     environment:
       - |
diff --git a/theodolite-benchmarks/docker-test/uc4-flink/test.sh b/theodolite-benchmarks/docker-test/uc4-flink/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0a478cf6641a1333f65281ae43cb525e32cb2510
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-flink/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 500 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b21\b"
diff --git a/theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-kstreams/docker-compose.yml
similarity index 76%
rename from theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml
rename to theodolite-benchmarks/docker-test/uc4-kstreams/docker-compose.yml
index 5fca44708006d1fae3ae2f9f46b5c42f6431fc3a..6aaa02990841547edb6059e4e2fbf3b28b50985c 100755
--- a/theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc4-kstreams/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,20 +20,25 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc4-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc4-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -41,7 +46,7 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
   load-generator: 
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
@@ -51,4 +56,4 @@ services:
       KAFKA_BOOTSTRAP_SERVERS: kafka:9092
       SCHEMA_REGISTRY_URL: http://schema-registry:8081
       NUM_SENSORS: 4
-      NUM_NESTED_GROUPS: 4
\ No newline at end of file
+      NUM_NESTED_GROUPS: 3
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc4-kstreams/test.sh b/theodolite-benchmarks/docker-test/uc4-kstreams/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9b9dee7dc78e7a587b9f2e5b778066e5bc099755
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-kstreams/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 32 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b16\b"
diff --git a/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs
index 66b402b58f39b79066638ce679c27c0378d5be54..174249a98f9d91ce2cbf2bb64b27c09b37f05d9f 100644
--- a/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/flink-commons/build.gradle b/theodolite-benchmarks/flink-commons/build.gradle
index a3a4a35752006bb10e15ff508ce0b37f70adc57d..bd07f1ca51ae6b781a92ae06ba8b2555fa83b873 100644
--- a/theodolite-benchmarks/flink-commons/build.gradle
+++ b/theodolite-benchmarks/flink-commons/build.gradle
@@ -3,7 +3,7 @@ plugins {
 }
 
 ext {
-    flinkVersion = '1.12.0'
+    flinkVersion = '1.13.5'
     scalaBinaryVersion = '2.12'
 }
 
diff --git a/theodolite-benchmarks/http-bridge/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/http-bridge/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..a375cb792eeb842ecfd1f789fbf6a716df43e9c8
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,127 @@
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.correct_indentation=true
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.organize_imports=true
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=;
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.correct_indentation=true
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.organize_imports=true
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
diff --git a/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=true
diff --git a/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=true
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/http-bridge/Dockerfile b/theodolite-benchmarks/http-bridge/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..b31dbcdef48f3b9eadf81a35c95e441c4b54955b
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/Dockerfile
@@ -0,0 +1,6 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/http-bridge.tar /
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /http-bridge/bin/http-bridge
\ No newline at end of file
diff --git a/theodolite-benchmarks/http-bridge/build.gradle b/theodolite-benchmarks/http-bridge/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..0377eefc76b456d8e0f94087b06d0c2689f977cf
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/build.gradle
@@ -0,0 +1,31 @@
+plugins {
+  // common java conventions
+  id 'theodolite.java-conventions'
+
+  // make executable
+  id 'application'
+}
+
+tasks.distZip.enabled = false
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+      url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+  implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+  implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+  implementation project(':load-generator-commons')
+  
+  implementation 'io.javalin:javalin:4.3.0'
+  implementation 'com.google.code.gson:gson:2.8.2'
+  runtimeOnly 'org.slf4j:slf4j-simple:1.7.25'
+
+  testImplementation 'junit:junit:4.12'
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Deserializer.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Deserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..f25c120d2165c4a1f747fdba32de43d4e4d157a6
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Deserializer.java
@@ -0,0 +1,13 @@
+package theodolite.commons.httpbridge;
+
+/**
+ * A class for converting objects to strings.
+ *
+ * @param <T> Type to be deserialized from.
+ */
+@FunctionalInterface
+public interface Deserializer<T> {
+
+  T deserialize(String json);
+
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Endpoint.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Endpoint.java
new file mode 100644
index 0000000000000000000000000000000000000000..43850d80699a0db0b0fcebd76f625a17f8133f30
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/Endpoint.java
@@ -0,0 +1,52 @@
+package theodolite.commons.httpbridge;
+
+import theodolite.commons.workloadgeneration.RecordSender;
+
+/**
+ * Class describing an endpoint of the {@link HttpBridge}, which converts JSON objects to Java
+ * objects and sends them using a {@link RecordSender}.
+ *
+ * @param <T> Type of objects this endpoint receives and converts.
+ */
+public class Endpoint<T> {
+
+  private final String path;
+
+  private final Deserializer<? extends T> recordDeserializer;
+
+  private final RecordSender<? super T> recordSender;
+
+  /**
+   * Create a new {@link Endpoint} at the given path.
+   */
+  public Endpoint(
+      final String path,
+      final Deserializer<? extends T> recordDeserializer,
+      final RecordSender<? super T> recordSender) {
+    this.path = path;
+    this.recordDeserializer = recordDeserializer;
+    this.recordSender = recordSender;
+  }
+
+  /**
+   * Create a new {@link Endpoint} at the given path with a {@link GsonDeserializer}.
+   */
+  public Endpoint(
+      final String path,
+      final Class<T> recordType,
+      final RecordSender<? super T> recordSender) {
+    this.path = path;
+    this.recordDeserializer = new GsonDeserializer<>(recordType);
+    this.recordSender = recordSender;
+  }
+
+  public String getPath() {
+    return this.path;
+  }
+
+  public void convert(final String json) {
+    final T record = this.recordDeserializer.deserialize(json);
+    this.recordSender.send(record);
+  }
+
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/EnvVarHttpBridgeFactory.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/EnvVarHttpBridgeFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..97956b8d47185c90efdc03393c03c8c44aea2335
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/EnvVarHttpBridgeFactory.java
@@ -0,0 +1,59 @@
+package theodolite.commons.httpbridge;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import theodolite.commons.workloadgeneration.ConfigurationKeys;
+import theodolite.commons.workloadgeneration.TitanKafkaSenderFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+class EnvVarHttpBridgeFactory {
+
+  private static final String PORT_KEY = "PORT";
+  private static final int PORT_DEFAULT = 8080;
+
+  private static final String HOST_KEY = "HOST";
+  private static final String HOST_DEFAULT = "0.0.0.0"; // NOPMD
+
+  private static final String KAFKA_BOOTSTRAP_SERVERS_DEFAULT = "localhost:9092"; // NOPMD
+  private static final String KAFKA_TOPIC_DEFAULT = "input";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+
+  public HttpBridge create() {
+    final Endpoint<?> converter = new Endpoint<>(
+        "/",
+        ActivePowerRecord.class,
+        TitanKafkaSenderFactory.forKafkaConfig(
+            this.getKafkaBootstrapServer(),
+            this.getKafkaTopic(),
+            this.getSchemaRegistryUrl()));
+    return new HttpBridge(this.getHost(), this.getPort(), List.of(converter));
+  }
+
+  private String getHost() {
+    return Objects.requireNonNullElse(System.getenv(HOST_KEY), HOST_DEFAULT);
+  }
+
+  private int getPort() {
+    return Optional.ofNullable(System.getenv(PORT_KEY)).map(Integer::parseInt).orElse(PORT_DEFAULT);
+  }
+
+  private String getKafkaBootstrapServer() {
+    return Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        KAFKA_BOOTSTRAP_SERVERS_DEFAULT);
+  }
+
+  private String getKafkaTopic() {
+    return Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        KAFKA_TOPIC_DEFAULT);
+  }
+
+  private String getSchemaRegistryUrl() {
+    return Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        SCHEMA_REGISTRY_URL_DEFAULT);
+  }
+
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/GsonDeserializer.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/GsonDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..42220617546527157d5463d6b9ce9208abc66d58
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/GsonDeserializer.java
@@ -0,0 +1,29 @@
+package theodolite.commons.httpbridge;
+
+import com.google.gson.Gson;
+
+/**
+ * A {@link Deserializer} based on GSON.
+ *
+ * @param <T> Type to be serialized from.
+ */
+public class GsonDeserializer<T> implements Deserializer<T> {
+
+  private final Gson gson;
+  private final Class<T> targetClass;
+
+  public GsonDeserializer(final Class<T> targetClass) {
+    this(new Gson(), targetClass);
+  }
+
+  public GsonDeserializer(final Gson gson, final Class<T> targetClass) {
+    this.gson = gson;
+    this.targetClass = targetClass;
+  }
+
+  @Override
+  public T deserialize(final String json) {
+    return this.gson.fromJson(json, this.targetClass);
+  }
+
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/HttpBridge.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/HttpBridge.java
new file mode 100644
index 0000000000000000000000000000000000000000..d36e191c8b0a591107de796f511aa853063dff73
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/HttpBridge.java
@@ -0,0 +1,40 @@
+package theodolite.commons.httpbridge;
+
+import java.util.List;
+import theodolite.commons.workloadgeneration.RecordSender;
+
+/**
+ * Class that creates a webserver with potentially multiple {@link Endpoint}s, which receives JSON
+ * objects at these endpoints, converts them to Java objects and send them using
+ * {@link RecordSender}s.
+ */
+public class HttpBridge {
+
+  private final JavalinWebServer webServer;
+
+  public HttpBridge(final String host, final int port, final List<Endpoint<?>> converters) {
+    this.webServer = new JavalinWebServer(converters, host, port);
+  }
+
+  public void start() {
+    this.webServer.start();
+  }
+
+  public void stop() {
+    this.webServer.stop();
+  }
+
+  public void runAsStandalone() {
+    Runtime.getRuntime().addShutdownHook(new Thread(() -> this.stop()));
+    this.start();
+  }
+
+  public static HttpBridge fromEnvironment() {
+    return new EnvVarHttpBridgeFactory().create();
+  }
+
+  public static void main(final String[] args) {
+    HttpBridge.fromEnvironment().runAsStandalone();
+  }
+
+}
diff --git a/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/JavalinWebServer.java b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/JavalinWebServer.java
new file mode 100644
index 0000000000000000000000000000000000000000..c23a17588d661fc5d1c6e9eb294d2d29fc165675
--- /dev/null
+++ b/theodolite-benchmarks/http-bridge/src/main/java/theodolite/commons/httpbridge/JavalinWebServer.java
@@ -0,0 +1,53 @@
+package theodolite.commons.httpbridge;
+
+import io.javalin.Javalin;
+import java.util.Collection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implementation of a webserver based on the Javalin framework.
+ */
+public class JavalinWebServer {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(JavalinWebServer.class);
+
+  private static final int HTTP_SUCCESS = 200;
+
+  private final Javalin app = Javalin.create();
+
+  private final String host;
+  private final int port;
+
+  /**
+   * Create a new instance, running on the specified host and port with the configured endpoints.
+   */
+  public JavalinWebServer(
+      final Collection<Endpoint<?>> converters,
+      final String host,
+      final int port) {
+    this.host = host;
+    this.port = port;
+    this.configureRoutes(converters);
+  }
+
+  private void configureRoutes(final Collection<Endpoint<?>> endpoints) {
+    for (final Endpoint<?> endpoint : endpoints) {
+      this.app.post(endpoint.getPath(), ctx -> {
+        final String record = ctx.body();
+        LOGGER.debug("Received record at '{}': {}", ctx.path(), record);
+        endpoint.convert(record);
+        ctx.status(HTTP_SUCCESS);
+      });
+    }
+  }
+
+  public void start() {
+    this.app.start(this.host, this.port);
+  }
+
+  public void stop() {
+    this.app.close();
+  }
+
+}
diff --git a/theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs
index 98b5ca8064a352aacfe2aebd13fbd0a87735fc3e..713419c8d3d74d3bd7fd05c3e839367753fcdee0 100644
--- a/theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/kstreams-commons/build.gradle b/theodolite-benchmarks/kstreams-commons/build.gradle
index 7683ffe39314ec375eda0ed4e139d618d44a7328..167a75327b251af20b1142fe42c82b3bbedfe62b 100644
--- a/theodolite-benchmarks/kstreams-commons/build.gradle
+++ b/theodolite-benchmarks/kstreams-commons/build.gradle
@@ -17,7 +17,7 @@ dependencies {
   // implementation 'org.slf4j:slf4j-simple:1.7.25'
   implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
   implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
-  implementation 'org.apache.kafka:kafka-streams:2.6.0'
+  implementation 'org.apache.kafka:kafka-streams:3.1.0'
 
   // Use JUnit test framework
   testImplementation 'junit:junit:4.12'
diff --git a/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java b/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java
index 89bd3147f0d3bb7a5fecc5d8c7d277bd294494ad..fe3cf484a81ee3561ad17b6b25d218cd011f2d5d 100644
--- a/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java
+++ b/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java
@@ -70,18 +70,15 @@ public abstract class KafkaStreamsBuilder {
 
     // optional configurations
     this.setOptionalProperty(propBuilder, StreamsConfig.ACCEPTABLE_RECOVERY_LAG_CONFIG,
-        this.config::getLong,
-        p -> p >= 0);
+        this.config::getLong, p -> p >= 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.BUFFERED_RECORDS_PER_PARTITION_CONFIG,
         this.config::getInt, p -> p > 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG,
-        this.config::getInt,
-        p -> p >= 0);
+        this.config::getInt, p -> p >= 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.COMMIT_INTERVAL_MS_CONFIG,
         this.config::getInt, p -> p >= 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.MAX_TASK_IDLE_MS_CONFIG,
-        this.config::getLong,
-        p -> p >= 0);
+        this.config::getLong, p -> p >= 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.MAX_WARMUP_REPLICAS_CONFIG,
         this.config::getInt, p -> p >= 1);
     this.setOptionalProperty(propBuilder, StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG,
@@ -89,22 +86,28 @@ public abstract class KafkaStreamsBuilder {
     this.setOptionalProperty(propBuilder, StreamsConfig.NUM_STREAM_THREADS_CONFIG,
         this.config::getInt, p -> p > 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.POLL_MS_CONFIG,
-        this.config::getLong,
-        p -> p >= 0);
+        this.config::getLong, p -> p >= 0);
     this.setOptionalProperty(propBuilder, StreamsConfig.PROCESSING_GUARANTEE_CONFIG,
-        this.config::getString, p -> StreamsConfig.AT_LEAST_ONCE.equals(p)
-            || StreamsConfig.EXACTLY_ONCE.equals(p) || StreamsConfig.EXACTLY_ONCE_BETA.equals(p));
+        this.config::getString, this::validateProcessingGuarantee);
     this.setOptionalProperty(propBuilder, StreamsConfig.REPLICATION_FACTOR_CONFIG,
         this.config::getInt, p -> p >= 0);
 
-    if (this.config.containsKey(StreamsConfig.TOPOLOGY_OPTIMIZATION)
-        && this.config.getBoolean(StreamsConfig.TOPOLOGY_OPTIMIZATION)) {
-      propBuilder.set(StreamsConfig.TOPOLOGY_OPTIMIZATION, StreamsConfig.OPTIMIZE);
+    if (this.config.containsKey(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG)
+        && this.config.getBoolean(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG)) {
+      propBuilder.set(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
     }
 
     return propBuilder.build();
   }
 
+  @SuppressWarnings("deprecation")
+  private boolean validateProcessingGuarantee(final String processingGuarantee) {
+    return StreamsConfig.AT_LEAST_ONCE.equals(processingGuarantee)
+        // We continue to support EXACTLY_ONCE to allow benchmarking it against v2
+        || StreamsConfig.EXACTLY_ONCE.equals(processingGuarantee)
+        || StreamsConfig.EXACTLY_ONCE_V2.equals(processingGuarantee);
+  }
+
   /**
    * Method to implement a {@link Topology} for a {@code KafkaStreams} application.
    *
@@ -116,7 +119,7 @@ public abstract class KafkaStreamsBuilder {
    * Builds the {@link KafkaStreams} instance.
    */
   public KafkaStreams build() {
-    // Create the Kafka streams instance.
+    // Create the Kafka Streams instance.
     final Properties properties = this.buildProperties();
     return new KafkaStreams(this.buildTopology(properties), properties);
   }
diff --git a/theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/load-generator-commons/build.gradle b/theodolite-benchmarks/load-generator-commons/build.gradle
index f2aa10b079f4be80d19d9ac5d822b7bdab0b6d78..2d8f77b5154b5b788e0729da69122b443740ce75 100644
--- a/theodolite-benchmarks/load-generator-commons/build.gradle
+++ b/theodolite-benchmarks/load-generator-commons/build.gradle
@@ -13,14 +13,16 @@ repositories {
 }
 
 dependencies {
-  implementation 'com.google.guava:guava:30.1-jre'
   implementation 'com.hazelcast:hazelcast:4.1.1'
   implementation 'com.hazelcast:hazelcast-kubernetes:2.2.1'
   implementation 'org.slf4j:slf4j-simple:1.7.25'
+  implementation 'com.google.guava:guava:30.1-jre'
+  implementation 'com.google.code.gson:gson:2.8.2'
   implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
   implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
   implementation 'org.apache.kafka:kafka-streams:2.6.0' // TODO required?
 
   // Use JUnit test framework
   testImplementation 'junit:junit:4.12'
+  testImplementation 'com.github.tomakehurst:wiremock-jre8:2.32.0'
 }
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java
index 45ac1d5bb9c21a1b6303de2f248d08b69c02fc28..7a60e271f04e396b2e0c69b1fcfee1d8a1ca8a7d 100644
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java
@@ -23,6 +23,8 @@ public final class ConfigurationKeys {
 
   public static final String THREADS = "THREADS";
 
+  public static final String TARGET = "TARGET";
+
   public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
 
   public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
@@ -35,6 +37,8 @@ public final class ConfigurationKeys {
 
   public static final String KAFKA_BUFFER_MEMORY = "KAFKA_BUFFER_MEMORY";
 
+  public static final String HTTP_URL = "HTTP_URL";
+
   private ConfigurationKeys() {}
 
 }
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HttpRecordSender.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HttpRecordSender.java
new file mode 100644
index 0000000000000000000000000000000000000000..6b7a5db067c8117f046aa0ff1c6f5d56c35c4321
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HttpRecordSender.java
@@ -0,0 +1,93 @@
+package theodolite.commons.workloadgeneration;
+
+import com.google.gson.Gson;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.net.http.HttpResponse.BodyHandler;
+import java.net.http.HttpResponse.BodyHandlers;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import org.apache.avro.specific.SpecificRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Sends monitoring records via HTTP.
+ *
+ * @param <T> {@link SpecificRecord} to send
+ */
+public class HttpRecordSender<T extends SpecificRecord> implements RecordSender<T> {
+
+  private static final int HTTP_OK = 200;
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HttpRecordSender.class);
+
+  private final Gson gson = new Gson();
+
+  private final HttpClient httpClient = HttpClient.newBuilder().build();
+
+  private final URI uri;
+
+  private final boolean async;
+
+  private final List<Integer> validStatusCodes;
+
+  /**
+   * Create a new {@link HttpRecordSender} that sends synchronously and accepts status 200 only.
+   *
+   * @param uri the {@link URI} records should be sent to
+   */
+  public HttpRecordSender(final URI uri) {
+    this(uri, false, List.of(HTTP_OK));
+  }
+
+  /**
+   * Create a new {@link HttpRecordSender}.
+   *
+   * @param uri the {@link URI} records should be sent to
+   * @param async whether requests are sent asynchronously; if {@code false}, send blocks until done
+   * @param validStatusCodes a list of HTTP status codes which are considered as successful
+   */
+  public HttpRecordSender(final URI uri, final boolean async,
+      final List<Integer> validStatusCodes) {
+    this.uri = uri;
+    this.async = async;
+    this.validStatusCodes = validStatusCodes;
+  }
+
+  @Override
+  public void send(final T message) {
+    final String json = this.gson.toJson(message);
+    final HttpRequest request = HttpRequest.newBuilder()
+        .uri(this.uri)
+        .POST(HttpRequest.BodyPublishers.ofString(json))
+        .build();
+    final BodyHandler<Void> bodyHandler = BodyHandlers.discarding();
+    // The response body is discarded; only the status code is inspected below.
+
+    final CompletableFuture<HttpResponse<Void>> result =
+        this.httpClient.sendAsync(request, bodyHandler)
+            .whenComplete((response, exception) -> {
+              if (exception != null) { // NOPMD
+                LOGGER.warn("Couldn't send request to {}.", this.uri, exception); // NOPMD false-p.
+              } else if (!this.validStatusCodes.contains(response.statusCode())) { // NOPMD
+                LOGGER.warn("Received status code {} for request to {}.", response.statusCode(),
+                    this.uri);
+              } else {
+                LOGGER.debug("Successfully sent request to {} (status={}).", this.uri,
+                    response.statusCode());
+              }
+            });
+    if (!this.async) { // in synchronous mode, block until the request has completed
+      try {
+        result.get();
+      } catch (InterruptedException | ExecutionException e) {
+        LOGGER.error("Couldn't get result for request to {}.", this.uri, e);
+      }
+    }
+  }
+
+}
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java
index ded7c347c8d6b057581dc63b691df5bb60997791..44ff8a92afd5356b4bb2af203899a61f7af48b2d 100644
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java
@@ -15,7 +15,7 @@ import titan.ccp.common.kafka.avro.SchemaRegistryAvroSerdeFactory;
 /**
  * Sends monitoring records to Kafka.
  *
- * @param <T> {@link IMonitoringRecord} to send
+ * @param <T> {@link SpecificRecord} to send
  */
 public class KafkaRecordSender<T extends SpecificRecord> implements RecordSender<T> {
 
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
index 3f5d14c2e7dccb94e4aacde1f531ec2e9d1fb8db..6453ef0bd3b6d5a3b5f7f2b77fa20da8f79cb35f 100644
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
@@ -1,11 +1,13 @@
 package theodolite.commons.workloadgeneration;
 
+import java.net.URI;
 import java.time.Duration;
 import java.util.Objects;
 import java.util.Properties;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import titan.ccp.model.records.ActivePowerRecord;
 
 /**
  * A Theodolite load generator.
@@ -20,9 +22,11 @@ public final class LoadGenerator {
   private static final int PERIOD_MS_DEFAULT = 1000;
   private static final int VALUE_DEFAULT = 10;
   private static final int THREADS_DEFAULT = 4;
+  private static final LoadGeneratorTarget TARGET_DEFAULT = LoadGeneratorTarget.KAFKA;
   private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
   private static final String KAFKA_TOPIC_DEFAULT = "input";
   private static final String KAFKA_BOOTSTRAP_SERVERS_DEFAULT = "localhost:9092"; // NOPMD
+  private static final String HTTP_URI_DEFAULT = "http://localhost:8080";
 
   private ClusterConfig clusterConfig;
   private WorkloadDefinition loadDefinition;
@@ -91,7 +95,7 @@ public final class LoadGenerator {
             new KeySpace(SENSOR_PREFIX_DEFAULT, NUMBER_OF_KEYS_DEFAULT),
             Duration.ofMillis(PERIOD_MS_DEFAULT)))
         .setGeneratorConfig(new LoadGeneratorConfig(
-            TitanRecordGeneratorFactory.forConstantValue(VALUE_DEFAULT),
+            TitanRecordGenerator.forConstantValue(VALUE_DEFAULT),
             TitanKafkaSenderFactory.forKafkaConfig(
                 KAFKA_BOOTSTRAP_SERVERS_DEFAULT,
                 KAFKA_TOPIC_DEFAULT,
@@ -134,6 +138,47 @@ public final class LoadGenerator {
       clusterConfig.setClusterNamePrefix(portAutoIncrement);
     }
 
+    final LoadGeneratorTarget target = LoadGeneratorTarget.from(
+        Objects.requireNonNullElse(System.getenv(ConfigurationKeys.TARGET),
+            TARGET_DEFAULT.getValue()));
+
+    final RecordSender<ActivePowerRecord> recordSender; // NOPMD
+    if (target == LoadGeneratorTarget.KAFKA) {
+      final String kafkaBootstrapServers = Objects.requireNonNullElse(
+          System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+          KAFKA_BOOTSTRAP_SERVERS_DEFAULT);
+      final String kafkaInputTopic = Objects.requireNonNullElse(
+          System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+          KAFKA_TOPIC_DEFAULT);
+      final String schemaRegistryUrl = Objects.requireNonNullElse(
+          System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+          SCHEMA_REGISTRY_URL_DEFAULT);
+      final Properties kafkaProperties = new Properties();
+      kafkaProperties.compute(ProducerConfig.BATCH_SIZE_CONFIG,
+          (k, v) -> System.getenv(ConfigurationKeys.KAFKA_BATCH_SIZE));
+      kafkaProperties.compute(ProducerConfig.LINGER_MS_CONFIG,
+          (k, v) -> System.getenv(ConfigurationKeys.KAFKA_LINGER_MS));
+      kafkaProperties.compute(ProducerConfig.BUFFER_MEMORY_CONFIG,
+          (k, v) -> System.getenv(ConfigurationKeys.KAFKA_BUFFER_MEMORY));
+      recordSender = TitanKafkaSenderFactory.forKafkaConfig(
+          kafkaBootstrapServers,
+          kafkaInputTopic,
+          schemaRegistryUrl);
+      LOGGER.info(
+          "Use Kafka as target with bootstrap server '{}', schema registry url '{}' and topic '{}'.", // NOCS
+          kafkaBootstrapServers, schemaRegistryUrl, kafkaInputTopic);
+    } else if (target == LoadGeneratorTarget.HTTP) {
+      final URI url = URI.create(
+          Objects.requireNonNullElse(
+              System.getenv(ConfigurationKeys.HTTP_URL),
+              HTTP_URI_DEFAULT));
+      recordSender = new HttpRecordSender<>(url);
+      LOGGER.info("Use HTTP server as target with url '{}'.", url);
+    } else {
+      // Should never happen
+      throw new IllegalStateException("Target " + target + " is not handled yet.");
+    }
+
     final int numSensors = Integer.parseInt(Objects.requireNonNullElse(
         System.getenv(ConfigurationKeys.NUM_SENSORS),
         Integer.toString(NUMBER_OF_KEYS_DEFAULT)));
@@ -146,22 +191,6 @@ public final class LoadGenerator {
     final int threads = Integer.parseInt(Objects.requireNonNullElse(
         System.getenv(ConfigurationKeys.THREADS),
         Integer.toString(THREADS_DEFAULT)));
-    final String kafkaBootstrapServers = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
-        KAFKA_BOOTSTRAP_SERVERS_DEFAULT);
-    final String kafkaInputTopic = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
-        KAFKA_TOPIC_DEFAULT);
-    final String schemaRegistryUrl = Objects.requireNonNullElse(
-        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
-        SCHEMA_REGISTRY_URL_DEFAULT);
-    final Properties kafkaProperties = new Properties();
-    kafkaProperties.compute(ProducerConfig.BATCH_SIZE_CONFIG,
-        (k, v) -> System.getenv(ConfigurationKeys.KAFKA_BATCH_SIZE));
-    kafkaProperties.compute(ProducerConfig.LINGER_MS_CONFIG,
-        (k, v) -> System.getenv(ConfigurationKeys.KAFKA_LINGER_MS));
-    kafkaProperties.compute(ProducerConfig.BUFFER_MEMORY_CONFIG,
-        (k, v) -> System.getenv(ConfigurationKeys.KAFKA_BUFFER_MEMORY));
 
     return new LoadGenerator()
         .setClusterConfig(clusterConfig)
@@ -169,11 +198,8 @@ public final class LoadGenerator {
             new KeySpace(SENSOR_PREFIX_DEFAULT, numSensors),
             Duration.ofMillis(periodMs)))
         .setGeneratorConfig(new LoadGeneratorConfig(
-            TitanRecordGeneratorFactory.forConstantValue(value),
-            TitanKafkaSenderFactory.forKafkaConfig(
-                kafkaBootstrapServers,
-                kafkaInputTopic,
-                schemaRegistryUrl)))
+            TitanRecordGenerator.forConstantValue(value),
+            recordSender))
         .withThreads(threads);
   }
 
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorTarget.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorTarget.java
new file mode 100644
index 0000000000000000000000000000000000000000..086e4de36301693c6873016122a47709b858a0d4
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorTarget.java
@@ -0,0 +1,26 @@
+package theodolite.commons.workloadgeneration;
+
+import java.util.stream.Stream;
+
+enum LoadGeneratorTarget {
+
+  KAFKA("kafka"), HTTP("http");
+
+  private final String value;
+
+  LoadGeneratorTarget(final String value) {
+    this.value = value;
+  }
+
+  String getValue() {
+    return this.value;
+  }
+
+  static LoadGeneratorTarget from(final String value) {
+    return Stream.of(LoadGeneratorTarget.values())
+        .filter(t -> t.value.equals(value))
+        .findFirst()
+        .orElseThrow(() -> new IllegalArgumentException("Target '" + value + "' does not exist."));
+  }
+
+}
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGenerator.java
new file mode 100644
index 0000000000000000000000000000000000000000..cebdacaee9a8e7d05787fdf3f846d49914574828
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGenerator.java
@@ -0,0 +1,38 @@
+package theodolite.commons.workloadgeneration;
+
+import java.time.Clock;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * A {@link RecordGenerator} that generates Titan {@link ActivePowerRecord}s with a constant value.
+ */
+public final class TitanRecordGenerator implements RecordGenerator<ActivePowerRecord> {
+
+  private final Clock clock;
+
+  private final double constantValue;
+
+  private TitanRecordGenerator(final double constantValue) {
+    this.constantValue = constantValue;
+    this.clock = Clock.systemUTC();
+  }
+
+  /* default */ TitanRecordGenerator(final double constantValue, final Clock clock) {
+    this.constantValue = constantValue;
+    this.clock = clock;
+  }
+
+  /**
+   * Create a {@link RecordGenerator} that generates Titan {@link ActivePowerRecord}s with a
+   * constant value.
+   */
+  public static RecordGenerator<ActivePowerRecord> forConstantValue(final double value) {
+    return new TitanRecordGenerator(value);
+  }
+
+  @Override
+  public ActivePowerRecord generate(final String key) {
+    return new ActivePowerRecord(key, this.clock.millis(), this.constantValue);
+  }
+
+}
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorFactory.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorFactory.java
deleted file mode 100644
index 4e1c10071eff28d77514dbc121e30bead3f6fa74..0000000000000000000000000000000000000000
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorFactory.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package theodolite.commons.workloadgeneration;
-
-import titan.ccp.model.records.ActivePowerRecord;
-
-/**
- * A factory for creating {@link RecordGenerator}s that creates Titan {@link ActivePowerRecord}s.
- */
-public final class TitanRecordGeneratorFactory {
-
-
-  private TitanRecordGeneratorFactory() {}
-
-  /**
-   * Create a {@link RecordGenerator} that generates Titan {@link ActivePowerRecord}s with a
-   * constant value.
-   */
-  public static RecordGenerator<ActivePowerRecord> forConstantValue(final double value) {
-    return sensor -> new ActivePowerRecord(sensor, System.currentTimeMillis(), value);
-  }
-
-}
diff --git a/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/HttpRecordSenderTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/HttpRecordSenderTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..0d331a900f5bd5c18dbeaf2fc2a249256151ce70
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/HttpRecordSenderTest.java
@@ -0,0 +1,49 @@
+package theodolite.commons.workloadgeneration;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
+import static com.github.tomakehurst.wiremock.client.WireMock.exactly;
+import static com.github.tomakehurst.wiremock.client.WireMock.post;
+import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
+import static com.github.tomakehurst.wiremock.client.WireMock.verify;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import java.net.URI;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import titan.ccp.model.records.ActivePowerRecord;
+
+public class HttpRecordSenderTest {
+
+  private HttpRecordSender<ActivePowerRecord> httpRecordSender;
+
+  @Rule
+  public WireMockRule wireMockRule = new WireMockRule(options().dynamicPort());
+
+  @Before
+  public void setup() {
+    this.httpRecordSender =
+        new HttpRecordSender<>(URI.create("http://localhost:" + this.wireMockRule.port()));
+  }
+
+  @Test
+  public void testValidUri() {
+    this.wireMockRule.stubFor(
+        post(urlPathEqualTo("/"))
+            .willReturn(
+                aResponse()
+                    .withStatus(200)
+                    .withBody("received")));
+
+    final ActivePowerRecord record = new ActivePowerRecord("my-id", 12345L, 12.34);
+    this.httpRecordSender.send(record);
+
+    final String expectedJson = "{\"identifier\":\"my-id\",\"timestamp\":12345,\"valueInW\":12.34}";
+    verify(exactly(1), postRequestedFor(urlEqualTo("/"))
+        .withRequestBody(equalTo(expectedJson))); // toJson
+  }
+
+}
diff --git a/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java
index 20c094ddcc7ff110a25aaffa494766e89d4d2475..49004839a9c8fd280aba5006a1f08c2acb3c3136 100644
--- a/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java
+++ b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java
@@ -2,7 +2,6 @@ package theodolite.commons.workloadgeneration;
 
 import org.junit.Assert;
 import org.junit.Test;
-import theodolite.commons.workloadgeneration.KeySpace;
 
 public class KeySpaceTest {
 
diff --git a/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/LoadGeneratorTargetTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/LoadGeneratorTargetTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..644ffad9a4d2732f72ac307294d1311eba3a9ce8
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/LoadGeneratorTargetTest.java
@@ -0,0 +1,26 @@
+package theodolite.commons.workloadgeneration;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class LoadGeneratorTargetTest {
+
+  @Test
+  public void testFromKafka() {
+    final LoadGeneratorTarget target = LoadGeneratorTarget.from("kafka");
+    Assert.assertEquals(LoadGeneratorTarget.KAFKA, target);
+  }
+
+  @Test
+  public void testFromHttp() {
+    final LoadGeneratorTarget target = LoadGeneratorTarget.from("http");
+    Assert.assertEquals(LoadGeneratorTarget.HTTP, target);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testFromInvalidTarget() {
+    LoadGeneratorTarget.from("<invalid-target>");
+  }
+
+
+}
diff --git a/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..04ba38b9c8fcd41df46d3d3070a6308acfd72cb7
--- /dev/null
+++ b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/TitanRecordGeneratorTest.java
@@ -0,0 +1,40 @@
+package theodolite.commons.workloadgeneration;
+
+import java.time.Clock;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import org.junit.Assert;
+import org.junit.Test;
+import titan.ccp.model.records.ActivePowerRecord;
+
+public class TitanRecordGeneratorTest {
+
+  @Test
+  public void testGenerate() {
+    final ZoneId zoneId = ZoneOffset.UTC;
+    final LocalDateTime dateTime = LocalDateTime.of(2022, 1, 17, 14, 2, 42);
+    final Instant instant = dateTime.atZone(zoneId).toInstant();
+    final TitanRecordGenerator generator =
+        new TitanRecordGenerator(42.0, Clock.fixed(instant, zoneId));
+
+    final ActivePowerRecord activePowerRecord = generator.generate("my-identifier");
+    Assert.assertEquals("my-identifier", activePowerRecord.getIdentifier());
+    Assert.assertEquals(instant.toEpochMilli(), activePowerRecord.getTimestamp());
+    Assert.assertEquals(42.0, activePowerRecord.getValueInW(), 0.001);
+  }
+
+  @Test
+  public void testTimestampForArbitraryClockTimeZone() {
+    final LocalDateTime dateTime = LocalDateTime.of(2022, 1, 17, 14, 2, 42);
+    final Instant instant = dateTime.atZone(ZoneId.of("Europe/Paris")).toInstant();
+    // Setting of ZoneId should have no impact on result as we request epoch millis
+    final Clock clock = Clock.fixed(instant, ZoneId.of("America/Sao_Paulo"));
+    final TitanRecordGenerator generator = new TitanRecordGenerator(42.0, clock);
+
+    final ActivePowerRecord activePowerRecord = generator.generate("my-identifier");
+    Assert.assertEquals(instant.toEpochMilli(), activePowerRecord.getTimestamp());
+  }
+
+}
diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle
index 8c452f3b58c901f477b80169f8bded80c35ff548..4ef9d714edc9aa2f46549382d25127d7b40e91fd 100644
--- a/theodolite-benchmarks/settings.gradle
+++ b/theodolite-benchmarks/settings.gradle
@@ -38,3 +38,4 @@ include 'uc4-beam'
 include 'uc4-beam-flink'
 include 'uc4-beam-samza'
 
+include 'http-bridge'
diff --git a/theodolite-benchmarks/uc1-beam-samza/Dockerfile b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
index 9b729060532ea3a242ac3084ba0bebf88ca2e9b6..cf6ef6675464e3c9d37db492b39fd8a71ec60e63 100644
--- a/theodolite-benchmarks/uc1-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
@@ -5,5 +5,5 @@ ENV MAX_SOURCE_PARALLELISM=1024
 ADD build/distributions/uc1-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc1-beam-samza/bin/uc1-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc1-beam-samza/bin/uc1-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
 
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java b/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java
index 79566fd937b9c100663d426610b6ff476035ef87..251523441e339cbaf58c7e3a1b30e97cc354df18 100644
--- a/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java
@@ -8,7 +8,6 @@ import org.slf4j.LoggerFactory;
 /**
  * Logs all Key Value pairs.
  */
-@SuppressWarnings({"unused"})
 public class LogKeyValue extends DoFn<KV<String, String>, KV<String, String>> {
   private static final long serialVersionUID = 4328743;
   private static final Logger LOGGER = LoggerFactory.getLogger(LogKeyValue.class);
@@ -19,9 +18,7 @@ public class LogKeyValue extends DoFn<KV<String, String>, KV<String, String>> {
   @ProcessElement
   public void processElement(@Element final KV<String, String> kv,
       final OutputReceiver<KV<String, String>> out) {
-    if (LOGGER.isInfoEnabled()) {
-      LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());
-    }
+    LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());
     out.output(kv);
   }
 }
diff --git a/theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc1-flink/Dockerfile b/theodolite-benchmarks/uc1-flink/Dockerfile
index 795b9e343a03cf0209e1625f5cbc3d45dcb77cda..fdaecd661e86275d670423351664e54221ce33cc 100644
--- a/theodolite-benchmarks/uc1-flink/Dockerfile
+++ b/theodolite-benchmarks/uc1-flink/Dockerfile
@@ -1,3 +1,3 @@
-FROM flink:1.12-scala_2.12-java11
+FROM flink:1.13-java11
 
 ADD build/libs/uc1-flink-all.jar /opt/flink/usrlib/artifacts/uc1-flink-all.jar
diff --git a/theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc2-beam-samza/Dockerfile b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
index 22855cea279819cacbf6eee253c30c60409fdba3..ae762791c40fc6981ce7e5fd08bea860ed9208ec 100644
--- a/theodolite-benchmarks/uc2-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
@@ -5,4 +5,4 @@ ENV MAX_SOURCE_PARALLELISM=1024
 ADD build/distributions/uc2-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc2-beam-samza/bin/uc2-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc2-beam-samza/bin/uc2-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc2-flink/Dockerfile b/theodolite-benchmarks/uc2-flink/Dockerfile
index 537ab28e2d4e5fb8edfc2760142acc33cc49b91d..01a85c57b00ea8bb4af8faa697708fd2b650de50 100644
--- a/theodolite-benchmarks/uc2-flink/Dockerfile
+++ b/theodolite-benchmarks/uc2-flink/Dockerfile
@@ -1,3 +1,3 @@
-FROM flink:1.12-scala_2.12-java11
+FROM flink:1.13-java11
 
 ADD build/libs/uc2-flink-all.jar /opt/flink/usrlib/artifacts/uc2-flink-all.jar
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java
index d156d895d86bb01a31f96e08764df8b8df743c4d..7a97099c71a18449b7cc3f0413632b52fd5b69f5 100644
--- a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java
@@ -5,7 +5,6 @@ import org.apache.commons.configuration2.Configuration;
 import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.runtime.state.StateBackend;
-import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
 import org.apache.flink.streaming.api.windowing.time.Time;
@@ -48,8 +47,6 @@ public final class HistoryServiceFlinkJob {
   }
 
   private void configureEnv() {
-    this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
-
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
     final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS);
     if (checkpointing) {
diff --git a/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java
index eda7c495a2cff6d58b62a8a6a74ea8e1b2d89aca..21dcf14a9322ce5a6381f96f22f5fadb85cc78f0 100644
--- a/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java
+++ b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java
@@ -53,7 +53,7 @@ public class TopologyBuilder {
             Consumed.with(Serdes.String(),
                 this.srAvroSerdeFactory.<ActivePowerRecord>forValues()))
         .groupByKey()
-        .windowedBy(TimeWindows.of(this.duration))
+        .windowedBy(TimeWindows.ofSizeWithNoGrace(this.duration))
         // .aggregate(
         // () -> 0.0,
         // (key, activePowerRecord, agg) -> agg + activePowerRecord.getValueInW(),
diff --git a/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc3-beam-samza/Dockerfile b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
index d3e860bd72c54121d616bc5562d519e6e1e21dec..54979b8e1fa8aa9ac7d073302301bd10cbff5f34 100644
--- a/theodolite-benchmarks/uc3-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
@@ -5,4 +5,4 @@ ENV MAX_SOURCE_PARALLELISM=1024
 ADD build/distributions/uc3-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc3-beam-samza/bin/uc3-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc3-beam-samza/bin/uc3-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc3-flink/Dockerfile b/theodolite-benchmarks/uc3-flink/Dockerfile
index 34c6da692cb30b738adf47b9d4ca893e72f330e4..cef05c0296f55f0cf7391dd35dd1806ec0efa287 100644
--- a/theodolite-benchmarks/uc3-flink/Dockerfile
+++ b/theodolite-benchmarks/uc3-flink/Dockerfile
@@ -1,3 +1,3 @@
-FROM flink:1.12-scala_2.12-java11
+FROM flink:1.13-java11
 
 ADD build/libs/uc3-flink-all.jar /opt/flink/usrlib/artifacts/uc3-flink-all.jar
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java
index 091b25674a2a31671ca68bd2076c694da9533d77..621146864fc84d032a7dc3c65fa253df9b940b2d 100644
--- a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java
@@ -9,7 +9,6 @@ import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.runtime.state.StateBackend;
-import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
 import org.apache.flink.streaming.api.windowing.time.Time;
@@ -55,8 +54,6 @@ public final class HistoryServiceFlinkJob {
   }
 
   private void configureEnv() {
-    this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
-
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
     final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS);
     if (checkpointing) {
diff --git a/theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java
index 1e976c07158720b3681d89413a5f277b1395f32d..4c63e21f3d9f1af6c9ef0363d7d01939faae9aef 100644
--- a/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java
+++ b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java
@@ -60,17 +60,18 @@ public class TopologyBuilder {
     final Serde<HourOfDayKey> keySerde = HourOfDayKeySerde.create();
 
     this.builder
-        .stream(this.inputTopic,
-            Consumed.with(Serdes.String(),
-                this.srAvroSerdeFactory.<ActivePowerRecord>forValues()))
+        .stream(this.inputTopic, Consumed.with(
+            Serdes.String(),
+            this.srAvroSerdeFactory.<ActivePowerRecord>forValues()))
         .selectKey((key, value) -> {
           final Instant instant = Instant.ofEpochMilli(value.getTimestamp());
           final LocalDateTime dateTime = LocalDateTime.ofInstant(instant, this.zone);
           return keyFactory.createKey(value.getIdentifier(), dateTime);
         })
-        .groupByKey(
-            Grouped.with(keySerde, this.srAvroSerdeFactory.forValues()))
-        .windowedBy(TimeWindows.of(this.aggregtionDuration).advanceBy(this.aggregationAdvance))
+        .groupByKey(Grouped.with(keySerde, this.srAvroSerdeFactory.forValues()))
+        .windowedBy(TimeWindows
+            .ofSizeWithNoGrace(this.aggregtionDuration)
+            .advanceBy(this.aggregationAdvance))
         .aggregate(
             () -> Stats.of(),
             (k, record, stats) -> StatsFactory.accumulate(stats, record.getValueInW()),
diff --git a/theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc4-beam-samza/Dockerfile b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
index ffb82ee09e99384d4914a0f86b6d9214fc161381..b59585f748b95cf62e59be01c8fa9dc0d919f43a 100644
--- a/theodolite-benchmarks/uc4-beam-samza/Dockerfile
+++ b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
@@ -5,4 +5,4 @@ ENV MAX_SOURCE_PARALLELISM=1024
 ADD build/distributions/uc4-beam-samza.tar /
 ADD samza-standalone.properties /
 
-CMD /uc4-beam-samza/bin/uc4-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+CMD /uc4-beam-samza/bin/uc4-beam-samza --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc4-beam/build.gradle b/theodolite-benchmarks/uc4-beam/build.gradle
index 502e94fa737fb2ae1bab861407b27575cd8766ca..3e9d917cc3586e5df2c5645f1d2cbcf03e3993e4 100644
--- a/theodolite-benchmarks/uc4-beam/build.gradle
+++ b/theodolite-benchmarks/uc4-beam/build.gradle
@@ -2,4 +2,6 @@ plugins {
   id 'theodolite.beam'
 }
 
-
+dependencies {
+  implementation ('io.confluent:kafka-streams-avro-serde:5.3.2') 
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java
index 7b66082c91b87c246d8c834249d2bc82545766f5..347d76dfb3d1d1f09f1091296a322a23bba67ec0 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java
@@ -14,13 +14,18 @@ import titan.ccp.model.records.ActivePowerRecord;
 
 
 /**
- * Duplicates the Kv containing the (Children,Parents) pair as a flat map.
+ * Duplicates the {@link KV} containing the (children,parents) pairs as flatMap.
  */
-public class DuplicateAsFlatMap extends DoFn
-    <KV<String, ActivePowerRecord>, KV<SensorParentKey, ActivePowerRecord>> {
+public class DuplicateAsFlatMap
+    extends DoFn<KV<String, ActivePowerRecord>, KV<SensorParentKey, ActivePowerRecord>> {
+
   private static final long serialVersionUID = -5132355515723961647L;
-  @StateId("parents")
-  private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value();//NOPMD
+
+  private static final String STATE_STORE_NAME = "DuplicateParents";
+
+  @StateId(STATE_STORE_NAME)
+  private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value(); // NOPMD
+
   private final PCollectionView<Map<String, Set<String>>> childParentPairMap;
 
   public DuplicateAsFlatMap(final PCollectionView<Map<String, Set<String>>> childParentPairMap) {
@@ -28,21 +33,21 @@ public class DuplicateAsFlatMap extends DoFn
     this.childParentPairMap = childParentPairMap;
   }
 
-
   /**
-   *  Generate a KV-pair for every child-parent match.
+   * Generate a KV-pair for every child-parent match.
    */
   @ProcessElement
-  public void processElement(@Element final KV<String, ActivePowerRecord> kv,
-                             final OutputReceiver<KV<SensorParentKey, ActivePowerRecord>> out,
-                             @StateId("parents") final ValueState<Set<String>> state,
-                             final ProcessContext c) {
+  public void processElement(
+      @Element final KV<String, ActivePowerRecord> kv,
+      final OutputReceiver<KV<SensorParentKey, ActivePowerRecord>> out,
+      @StateId(STATE_STORE_NAME) final ValueState<Set<String>> state,
+      final ProcessContext c) {
 
     final ActivePowerRecord record = kv.getValue() == null ? null : kv.getValue();
     final Set<String> newParents =
-        c.sideInput(childParentPairMap).get(kv.getKey()) == null
+        c.sideInput(this.childParentPairMap).get(kv.getKey()) == null
             ? Collections.emptySet()
-            : c.sideInput(childParentPairMap).get(kv.getKey());
+            : c.sideInput(this.childParentPairMap).get(kv.getKey());
     final Set<String> oldParents =
         MoreObjects.firstNonNull(state.read(), Collections.emptySet());
     // Forward new Pairs if they exist
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java
index 7179fe5da937280d5baf72cd73cc392ef15a60e0..0c63e6f9322e5f70f1ad010de168f1a5292a45a4 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java
@@ -66,8 +66,8 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
     final Duration gracePeriod =
         Duration.standardSeconds(config.getInt(ConfigurationKeys.GRACE_PERIOD_MS));
 
-    // Build kafka configuration
-    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
+    // Build Kafka configuration
+    final Map<String, Object> consumerConfig = super.buildConsumerConfig();
     final Map<String, Object> configurationConfig = this.configurationConfig(config);
 
     // Set Coders for Classes that will be distributed
@@ -77,25 +77,34 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
     // Read from Kafka
     // ActivePowerRecords
     final KafkaActivePowerTimestampReader kafkaActivePowerRecordReader =
-        new KafkaActivePowerTimestampReader(this.bootstrapServer, this.inputTopic, consumerConfig);
+        new KafkaActivePowerTimestampReader(
+            this.bootstrapServer,
+            this.inputTopic,
+            consumerConfig);
 
     // Configuration Events
     final KafkaGenericReader<Event, String> kafkaConfigurationReader =
         new KafkaGenericReader<>(
-            this.bootstrapServer, configurationTopic, configurationConfig,
-            EventDeserializer.class, StringDeserializer.class);
-
-    // Transform into AggregatedActivePowerRecords into ActivePowerRecords
-    final AggregatedToActive aggregatedToActive = new AggregatedToActive();
+            this.bootstrapServer,
+            configurationTopic,
+            configurationConfig,
+            EventDeserializer.class,
+            StringDeserializer.class);
 
     // Write to Kafka
     final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaOutput =
         new KafkaWriterTransformation<>(
-            this.bootstrapServer, outputTopic, AggregatedActivePowerRecordSerializer.class);
+            this.bootstrapServer,
+            outputTopic,
+            AggregatedActivePowerRecordSerializer.class,
+            super.buildProducerConfig());
 
     final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaFeedback =
         new KafkaWriterTransformation<>(
-            this.bootstrapServer, feedbackTopic, AggregatedActivePowerRecordSerializer.class);
+            this.bootstrapServer,
+            feedbackTopic,
+            AggregatedActivePowerRecordSerializer.class,
+            super.buildProducerConfig());
 
     // Apply pipeline transformations
     final PCollection<KV<String, ActivePowerRecord>> values = this
@@ -115,7 +124,10 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
             .withBootstrapServers(this.bootstrapServer)
             .withTopic(feedbackTopic)
             .withKeyDeserializer(StringDeserializer.class)
-            .withValueDeserializer(AggregatedActivePowerRecordDeserializer.class)
+            .withValueDeserializerAndCoder(
+                AggregatedActivePowerRecordDeserializer.class,
+                AvroCoder.of(AggregatedActivePowerRecord.class))
+            .withConsumerConfigUpdates(consumerConfig)
             .withTimestampPolicyFactory(
                 (tp, previousWaterMark) -> new AggregatedActivePowerRecordEventTimePolicy(
                     previousWaterMark))
@@ -123,11 +135,12 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
         .apply("Apply Windows", Window.into(FixedWindows.of(duration)))
         // Convert into the correct data format
         .apply("Convert AggregatedActivePowerRecord to ActivePowerRecord",
-            MapElements.via(aggregatedToActive))
+            MapElements.via(new AggregatedToActive()))
         .apply("Set trigger for feedback", Window
             .<KV<String, ActivePowerRecord>>configure()
             .triggering(Repeatedly.forever(
-                AfterProcessingTime.pastFirstElementInPane()
+                AfterProcessingTime
+                    .pastFirstElementInPane()
                     .plusDelayOf(triggerDelay)))
             .withAllowedLateness(gracePeriod)
             .discardingFiredPanes());
@@ -170,17 +183,13 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
                 .accumulatingFiredPanes())
             .apply(View.asMap());
 
-    final FilterNullValues filterNullValues = new FilterNullValues();
-
     // Build pairs of every sensor reading and parent
     final PCollection<KV<SensorParentKey, ActivePowerRecord>> flatMappedValues =
         inputCollection.apply(
             "Duplicate as flatMap",
-            ParDo.of(new DuplicateAsFlatMap(childParentPairMap))
-                .withSideInputs(childParentPairMap))
+            ParDo.of(new DuplicateAsFlatMap(childParentPairMap)).withSideInputs(childParentPairMap))
             .apply("Filter only latest changes", Latest.perKey())
-            .apply("Filter out null values",
-                Filter.by(filterNullValues));
+            .apply("Filter out null values", Filter.by(new FilterNullValues()));
 
     final SetIdForAggregated setIdForAggregated = new SetIdForAggregated();
     final SetKeyToGroup setKeyToGroup = new SetKeyToGroup();
@@ -204,8 +213,7 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
 
     aggregations.apply("Write to aggregation results", kafkaOutput);
 
-    aggregations
-        .apply("Write to feedback topic", kafkaFeedback);
+    aggregations.apply("Write to feedback topic", kafkaFeedback);
 
   }
 
@@ -217,14 +225,15 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
    */
   public Map<String, Object> configurationConfig(final Configuration config) {
     final Map<String, Object> consumerConfig = new HashMap<>();
-    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+    consumerConfig.put(
+        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
         config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
-    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
-        config
-            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
-
-    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, config
-        .getString(ConfigurationKeys.APPLICATION_NAME) + "-configuration");
+    consumerConfig.put(
+        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        config.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+    consumerConfig.put(
+        ConsumerConfig.GROUP_ID_CONFIG, config
+            .getString(ConfigurationKeys.APPLICATION_NAME) + "-configuration");
     return consumerConfig;
   }
 
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java
index 8692be5ae6637ebda86f10d66b43c6071264e099..cff04e132a93f6c8098c3039232dd48084e6d264 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java
@@ -12,11 +12,12 @@ import org.apache.beam.sdk.values.KV;
  */
 public class UpdateChildParentPairs extends DoFn<KV<String, Set<String>>, KV<String, Set<String>>> {
 
+  private static final String STATE_STORE_NAME = "UpdateParents";
+
   private static final long serialVersionUID = 1L;
 
-  @StateId("parents")
-  private final StateSpec<ValueState<Set<String>>> parents = // NOPMD
-      StateSpecs.value();
+  @StateId(STATE_STORE_NAME)
+  private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value(); // NOPMD
 
   /**
    * Match the changes accordingly.
@@ -24,9 +25,10 @@ public class UpdateChildParentPairs extends DoFn<KV<String, Set<String>>, KV<Str
    * @param kv the sensor parents set that contains the changes.
    */
   @ProcessElement
-  public void processElement(@Element final KV<String, Set<String>> kv,
+  public void processElement(
+      @Element final KV<String, Set<String>> kv,
       final OutputReceiver<KV<String, Set<String>>> out,
-      @StateId("parents") final ValueState<Set<String>> state) {
+      @StateId(STATE_STORE_NAME) final ValueState<Set<String>> state) {
     if (kv.getValue() == null || !kv.getValue().equals(state.read())) {
       out.output(kv);
       state.write(kv.getValue());
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java
index d2b484f5ab30be63f311d6dbcf495baebbd5e2b4..3e0be0fa456efa3ec67504ea9d0e285ae8b3b913 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java
@@ -11,8 +11,7 @@ import org.apache.beam.sdk.coders.CoderException;
 import titan.ccp.model.records.AggregatedActivePowerRecord;
 
 /**
- * Wrapper Class that encapsulates a AggregatedActivePowerRecord Serde in a
- * org.apache.beam.sdk.coders.Coder.
+ * {@link Coder} for an {@link AggregatedActivePowerRecord}.
  */
 @SuppressWarnings("serial")
 public class AggregatedActivePowerRecordCoder extends Coder<AggregatedActivePowerRecord>
@@ -45,13 +44,13 @@ public class AggregatedActivePowerRecordCoder extends Coder<AggregatedActivePowe
 
   @Override
   public List<? extends Coder<?>> getCoderArguments() {
-    return null;
+    return List.of();
   }
 
   @Override
   public void verifyDeterministic() throws NonDeterministicException {
     if (!DETERMINISTIC) {
-      throw new NonDeterministicException(this, "This class should be deterministic!");
+      throw new NonDeterministicException(this, "This class should be deterministic.");
     }
   }
 }
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java
index 6e2f2765ff65d3bca2a127be36db0854f15afebc..3076861a53dac031afd9e8eb913b5a0bafe480c0 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java
@@ -1,34 +1,12 @@
 package serialization;
 
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import org.apache.beam.sdk.coders.AvroCoder;
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
 import org.apache.kafka.common.serialization.Deserializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import titan.ccp.model.records.AggregatedActivePowerRecord;
 
 /**
- * Wrapper Class that encapsulates a IMonitoringRecordSerde.serializer in a Deserializer
+ * {@link Deserializer} for an {@link AggregatedActivePowerRecord}.
  */
 public class AggregatedActivePowerRecordDeserializer
-    implements Deserializer<AggregatedActivePowerRecord> {
-
-  private static final Logger LOGGER =
-      LoggerFactory.getLogger(AggregatedActivePowerRecordDeserializer.class);
-
-  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
-      AvroCoder.of(AggregatedActivePowerRecord.class);
-
-  @Override
-  public AggregatedActivePowerRecord deserialize(final String topic, final byte[] data) {
-    AggregatedActivePowerRecord value = null;
-    try {
-      value = this.avroEnCoder.decode(new ByteArrayInputStream(data));
-    } catch (final IOException e) {
-      LOGGER.error("Could not deserialize AggregatedActivePowerRecord", e);
-    }
-    return value;
-  }
-
+    extends SpecificAvroDeserializer<AggregatedActivePowerRecord> {
 }
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java
index 77b79d5465f1d561870bf5b04f8fa20f87076adb..26801d8a28b9756214c65c4e8190e15d04bb3e68 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java
@@ -1,45 +1,12 @@
 package serialization;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import org.apache.beam.sdk.coders.AvroCoder;
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
 import org.apache.kafka.common.serialization.Serializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import titan.ccp.model.records.AggregatedActivePowerRecord;
 
 /**
- * Wrapper Class that encapsulates a IMonitoringRecordSerde.serializer in a Serializer
+ * {@link Serializer} for an {@link AggregatedActivePowerRecord}.
  */
 public class AggregatedActivePowerRecordSerializer
-    implements Serializer<AggregatedActivePowerRecord> {
-
-  private static final Logger LOGGER =
-      LoggerFactory.getLogger(AggregatedActivePowerRecordSerializer.class);
-
-  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
-      AvroCoder.of(AggregatedActivePowerRecord.class);
-
-  @Override
-  public byte[] serialize(final String topic, final AggregatedActivePowerRecord data) {
-    final ByteArrayOutputStream out = new ByteArrayOutputStream();
-    try {
-      this.avroEnCoder.encode(data, out);
-    } catch (final IOException e) {
-      LOGGER.error("Could not serialize AggregatedActivePowerRecord", e);
-    }
-    final byte[] result = out.toByteArray();
-    try {
-      out.close();
-    } catch (final IOException e) {
-      LOGGER.error(
-          "Could not close output stream after serialization of AggregatedActivePowerRecord", e);
-    }
-    return result;
-  }
-
-  @Override
-  public void close() {
-    Serializer.super.close();
-  }
+    extends SpecificAvroSerializer<AggregatedActivePowerRecord> {
 }
diff --git a/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs
index 272e01533f6a345d53d2635c47e38c6d3c33dc8a..08fcb07933ca19165976bffd5e7fdfdaf64ee1d2 100644
--- a/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc4-flink/Dockerfile b/theodolite-benchmarks/uc4-flink/Dockerfile
index 4f51f379e5da436104bb8c914e3233d6ecb4ec1f..1fe58e9930db0ef30fa10d12519dd68ccdb2e97e 100644
--- a/theodolite-benchmarks/uc4-flink/Dockerfile
+++ b/theodolite-benchmarks/uc4-flink/Dockerfile
@@ -1,3 +1,3 @@
-FROM flink:1.12-scala_2.12-java11
+FROM flink:1.13-java11
 
 ADD build/libs/uc4-flink-all.jar /opt/flink/usrlib/artifacts/uc4-flink-all.jar
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java
index 3e2878a893057024de00333492462f5029eb6d77..48eadff101905b746ab239f6030decb728c12475 100644
--- a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java
+++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java
@@ -9,12 +9,12 @@ import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.runtime.state.StateBackend;
-import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
 import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
+import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
 import org.apache.kafka.common.serialization.Serdes;
 import org.slf4j.Logger;
@@ -68,8 +68,6 @@ public final class AggregationServiceFlinkJob {
   }
 
   private void configureEnv() {
-    this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
-
     final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true);
     final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS);
     if (checkpointing) {
@@ -103,9 +101,11 @@ public final class AggregationServiceFlinkJob {
     this.env.getConfig().registerTypeWithKryoSerializer(Set.of(1, 2, 3, 4).getClass(), // NOCS
         new ImmutableSetSerializer());
 
-    this.env.getConfig().getRegisteredTypesWithKryoSerializers()
-        .forEach((c, s) -> LOGGER.info("Class " + c.getName() + " registered with serializer "
-            + s.getSerializer().getClass().getName()));
+    this.env
+        .getConfig()
+        .getRegisteredTypesWithKryoSerializers()
+        .forEach((c, s) -> LOGGER.info("Class '{}' registered with serializer '{}'.", c.getName(),
+            s.getSerializer().getClass().getName()));
   }
 
   private void buildPipeline() {
@@ -134,12 +134,13 @@ public final class AggregationServiceFlinkJob {
     final FlinkKafkaConsumer<AggregatedActivePowerRecord> kafkaOutputSource =
         kafkaConnector.createConsumer(outputTopic, AggregatedActivePowerRecord.class);
 
-    final FlinkKafkaConsumer<Tuple2<Event, String>> kafkaConfigSource =
+    final FlinkKafkaConsumerBase<Tuple2<Event, String>> kafkaConfigSource =
         kafkaConnector.createConsumer(
             configurationTopic,
             EventSerde::serde,
             Serdes::String,
-            TupleType.of(TypeInformation.of(Event.class), Types.STRING));
+            TupleType.of(TypeInformation.of(Event.class), Types.STRING))
+            .setStartFromEarliest();
 
     // Sink to output topic with SensorId, AggregatedActivePowerRecord
     final FlinkKafkaProducer<Tuple2<String, AggregatedActivePowerRecord>> kafkaAggregationSink =
diff --git a/theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs
index fa98ca63d77bdee891150bd6713f70197a75cefc..a375cb792eeb842ecfd1f789fbf6a716df43e9c8 100644
--- a/theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java
index 623870313cd341d0594fee38d2fd0ae297abbeae..712b20cb63c2d9f6b77321eaf18eafe4b16854d2 100644
--- a/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java
+++ b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java
@@ -146,7 +146,7 @@ public class TopologyBuilder {
         .groupByKey(Grouped.with(
             SensorParentKeySerde.serde(),
             this.srAvroSerdeFactory.forValues()))
-        .windowedBy(TimeWindows.of(this.emitPeriod).grace(this.gracePeriod))
+        .windowedBy(TimeWindows.ofSizeAndGrace(this.emitPeriod, this.gracePeriod))
         .reduce(
             // TODO Configurable window aggregation function
             (oldVal, newVal) -> newVal.getTimestamp() >= oldVal.getTimestamp() ? newVal : oldVal,
diff --git a/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs
index 4d01df75552c562406705858b6368ecf59d6e82f..ac23341bf71ac68df4183361493261758fd5dafb 100644
--- a/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs
+++ b/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs
@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
diff --git a/theodolite/README.md b/theodolite/README.md
index 96f56c20db1d0796ba692cc497b93532517526ff..f662329f7eda3a39632581b7125a2f2f2feced8a 100644
--- a/theodolite/README.md
+++ b/theodolite/README.md
@@ -1,12 +1,12 @@
-# Theodolite project
+# Theodolite
 
 This project uses Quarkus, the Supersonic Subatomic Java Framework.
 
-If you want to learn more about Quarkus, please visit its website: <https://quarkus.io/> .
+If you want to learn more about Quarkus, please visit its website: https://quarkus.io/.
 
 ## Running the application in dev mode
 
-You can run your application in dev mode using:
+You can run your application in dev mode that enables live coding using:
 
 ```sh
 ./gradlew quarkusDev
@@ -23,8 +23,10 @@ The application can be packaged using:
 ./gradlew build
 ```
 
-It produces the `theodolite-0.7.0-SNAPSHOT-runner.jar` file in the `/build` directory. Be aware that it’s not
-an _über-jar_ as the dependencies are copied into the `build/lib` directory.
+It produces the `quarkus-run.jar` file in the `build/quarkus-app/` directory.
+Be aware that it’s not an _über-jar_ as the dependencies are copied into the `build/quarkus-app/lib/` directory.
+
+The application is now runnable using `java -jar build/quarkus-app/quarkus-run.jar`.
 
 If you want to build an _über-jar_, execute the following command:
 
@@ -32,12 +34,10 @@ If you want to build an _über-jar_, execute the following command:
 ./gradlew build -Dquarkus.package.type=uber-jar
 ```
 
-The application is now runnable using `java -jar build/theodolite-0.7.0-SNAPSHOT-runner.jar`.
+The application, packaged as an _über-jar_, is now runnable using `java -jar build/*-runner.jar`.
 
 ## Creating a native executable
 
-It is recommended to use the native GraalVM images to create executable jars from Theodolite. For more information please visit the [Native Image guide](https://www.graalvm.org/reference-manual/native-image/).
-
 You can create a native executable using:
 
 ```sh
@@ -55,15 +55,21 @@ You can then execute your native executable with:
 
 If you want to learn more about building native executables, please consult https://quarkus.io/guides/gradle-tooling.
 
-## Build docker images
+## Building container images
 
-For the jvm version use:
+For the JVM version use:
 
 ```sh
 ./gradlew build
 docker build -f src/main/docker/Dockerfile.jvm -t theodolite-jvm .
 ```
 
+Alternatively, you can also use Kaniko to build the image:
+
+```sh
+docker run -it --rm --name kaniko -v "`pwd`":/theodolite --entrypoint "" gcr.io/kaniko-project/executor:debug /kaniko/executor --context /theodolite --dockerfile src/main/docker/Dockerfile.jvm --no-push
+```
+
 For the native image version use:
 
 ```sh
@@ -71,7 +77,7 @@ For the native image version use:
 docker build -f src/main/docker/Dockerfile.native -t theodolite-native .
 ```
 
-## Execute docker images
+## Run a container
 
 Remember to set the environment variables first.
 
diff --git a/theodolite/build.gradle b/theodolite/build.gradle
index 06d451cc24395824650e88d2fe516eb4015a266e..a066e94f09b71720f9392947640b077b153ccb9c 100644
--- a/theodolite/build.gradle
+++ b/theodolite/build.gradle
@@ -1,14 +1,14 @@
 plugins {
-    id 'org.jetbrains.kotlin.jvm' version "1.5.31"
-    id "org.jetbrains.kotlin.plugin.allopen" version "1.5.31"
+    id 'org.jetbrains.kotlin.jvm' version "1.6.10"
+    id "org.jetbrains.kotlin.plugin.allopen" version "1.6.10"
     id 'io.quarkus'
     id "io.gitlab.arturbosch.detekt" version "1.15.0"
     id "org.jlleitschuh.gradle.ktlint" version "10.0.0"
 }
 
 repositories {
-    mavenLocal()
     mavenCentral()
+    mavenLocal()
     jcenter()
 }
 
@@ -26,19 +26,13 @@ dependencies {
     implementation 'com.google.code.gson:gson:2.8.9'
     implementation 'org.slf4j:slf4j-simple:1.7.32'
     implementation 'io.github.microutils:kotlin-logging:2.1.16'
-    //implementation('io.fabric8:kubernetes-client:5.4.1'){force = true}
-    //implementation('io.fabric8:kubernetes-model-core:5.4.1'){force = true}
-    //implementation('io.fabric8:kubernetes-model-common:5.4.1'){force = true}
     implementation 'org.apache.kafka:kafka-clients:2.8.0'
     implementation 'khttp:khttp:1.0.0'
 
-    // compile 'junit:junit:4.12'
-
     testImplementation 'io.quarkus:quarkus-junit5'
     testImplementation 'io.quarkus:quarkus-test-kubernetes-client'
     testImplementation 'io.rest-assured:rest-assured'
     testImplementation 'org.junit-pioneer:junit-pioneer:1.5.0'
-    //testImplementation 'io.fabric8:kubernetes-server-mock:5.10.1'
     testImplementation "org.mockito.kotlin:mockito-kotlin:4.0.0"
 }
 
diff --git a/theodolite/build_jvm.sh b/theodolite/build_jvm.sh
deleted file mode 100755
index f4dd32fc5228576f09e95f0e8ac06fa08ea6acc7..0000000000000000000000000000000000000000
--- a/theodolite/build_jvm.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-
-./gradlew build -x test
-
-docker build -f src/main/docker/Dockerfile.jvm -t quarkus/theodolite-jvm .
-
-docker run -i --rm -p 8080:8080 quarkus/theodolite-jvm
diff --git a/theodolite/build_native.sh b/theodolite/build_native.sh
deleted file mode 100755
index c2d7d81f35a24af951005bb30c52a8ab494ddb64..0000000000000000000000000000000000000000
--- a/theodolite/build_native.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-
-./gradlew build -Dquarkus.package.type=native -x test
-
-docker build -f src/main/docker/Dockerfile.native -t quarkus/theodolite .
-
-docker run -i --rm -p 8080:8080 quarkus/theodolite
diff --git a/theodolite/crd/crd-benchmark.yaml b/theodolite/crd/crd-benchmark.yaml
index 55bf6ed69e44287905bce85b63f66bb43ea65669..c901e61360c05b2f1cf2b1767a20f624eb262231 100644
--- a/theodolite/crd/crd-benchmark.yaml
+++ b/theodolite/crd/crd-benchmark.yaml
@@ -20,7 +20,7 @@ spec:
         properties:
           spec:
             type: object
-            required: ["sut", "loadGenerator", "resourceTypes", "loadTypes", "kafkaConfig"]
+            required: ["sut", "loadGenerator", "resourceTypes", "loadTypes"]
             properties:
               name:
                 description: This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.
diff --git a/theodolite/gradle.properties b/theodolite/gradle.properties
index 76ed8f2136f14263460bc391d420c78de200d659..fd5768bc24a65dbd43b3ea770c854ae7c0da0a91 100644
--- a/theodolite/gradle.properties
+++ b/theodolite/gradle.properties
@@ -1,8 +1,8 @@
 #Gradle properties
-quarkusPluginVersion=2.5.2.Final
-quarkusPlatformArtifactId=quarkus-bom
 quarkusPluginId=io.quarkus
+quarkusPluginVersion=2.6.3.Final
 quarkusPlatformGroupId=io.quarkus.platform
-quarkusPlatformVersion=2.5.2.Final
+quarkusPlatformArtifactId=quarkus-bom
+quarkusPlatformVersion=2.6.3.Final
 
 #org.gradle.logging.level=INFO
\ No newline at end of file
diff --git a/theodolite/src/main/docker/Dockerfile.jvm b/theodolite/src/main/docker/Dockerfile.jvm
index 03035752038fee2e5ce4c477c61adc84991f3729..e33d7c379a4336610c16d59b9d3315a1e8abad2b 100644
--- a/theodolite/src/main/docker/Dockerfile.jvm
+++ b/theodolite/src/main/docker/Dockerfile.jvm
@@ -18,38 +18,24 @@
 #
 # Then run the container using :
 #
-# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/theodolite-jvm
+# docker run -i --rm -p 8080:8080 quarkus/theodolite-jvm
 #
 ###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4 
+FROM registry.access.redhat.com/ubi8/openjdk-11-runtime:1.10
 
-ARG JAVA_PACKAGE=java-11-openjdk-headless
-ARG RUN_JAVA_VERSION=1.3.8
 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
-# Install java and the run-java script
-# Also set up permissions for user `1001`
-RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
-    && microdnf update \
-    && microdnf clean all \
-    && mkdir /deployments \
-    && chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments \
-    && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
-    && chown 1001 /deployments/run-java.sh \
-    && chmod 540 /deployments/run-java.sh \
-    && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
 
 # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
 ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
+
 # We make four distinct layers so if there are application changes the library layers can be re-used
-COPY --chown=1001 build/quarkus-app/lib/ /deployments/lib/
-COPY --chown=1001 build/quarkus-app/*.jar /deployments/
-COPY --chown=1001 build/quarkus-app/app/ /deployments/app/
-COPY --chown=1001 build/quarkus-app/quarkus/ /deployments/quarkus/
+COPY --chown=185 build/quarkus-app/lib/ /deployments/lib/
+COPY --chown=185 build/quarkus-app/*.jar /deployments/
+COPY --chown=185 build/quarkus-app/app/ /deployments/app/
+COPY --chown=185 build/quarkus-app/quarkus/ /deployments/quarkus/
 
 EXPOSE 8080
-USER 1001
+USER 185
 
-ENTRYPOINT [ "/deployments/run-java.sh" ]
+ENTRYPOINT [ "java", "-jar", "/deployments/quarkus-run.jar" ]
 
diff --git a/theodolite/src/main/docker/Dockerfile.legacy-jar b/theodolite/src/main/docker/Dockerfile.legacy-jar
index f9dffd188570c14087bafaec838b58b61a4e5912..aa5908c4ed42f005fa67c17fd2c3b3e00978228a 100644
--- a/theodolite/src/main/docker/Dockerfile.legacy-jar
+++ b/theodolite/src/main/docker/Dockerfile.legacy-jar
@@ -18,34 +18,20 @@
 #
 # Then run the container using :
 #
-# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/theodolite-legacy-jar
+# docker run -i --rm -p 8080:8080 quarkus/theodolite-legacy-jar
 #
 ###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4 
+FROM registry.access.redhat.com/ubi8/openjdk-11-runtime:1.10
 
-ARG JAVA_PACKAGE=java-11-openjdk-headless
-ARG RUN_JAVA_VERSION=1.3.8
 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
-# Install java and the run-java script
-# Also set up permissions for user `1001`
-RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
-    && microdnf update \
-    && microdnf clean all \
-    && mkdir /deployments \
-    && chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments \
-    && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
-    && chown 1001 /deployments/run-java.sh \
-    && chmod 540 /deployments/run-java.sh \
-    && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
 
 # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
 ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
+
 COPY build/lib/* /deployments/lib/
-COPY build/*-runner.jar /deployments/app.jar
+COPY build/*-runner.jar /deployments/quarkus-run.jar
 
 EXPOSE 8080
-USER 1001
+USER 185
 
-ENTRYPOINT [ "/deployments/run-java.sh" ]
+ENTRYPOINT [ "java", "-jar", "/deployments/quarkus-run.jar" ]
diff --git a/theodolite/src/main/docker/Dockerfile.native b/theodolite/src/main/docker/Dockerfile.native
index 04a1dd6f2b6cc99511bf705eed5d98be1da25b05..34ccd6622bf2fba6f9707989fffd9bb6390a4a8b 100644
--- a/theodolite/src/main/docker/Dockerfile.native
+++ b/theodolite/src/main/docker/Dockerfile.native
@@ -14,12 +14,12 @@
 # docker run -i --rm -p 8080:8080 quarkus/theodolite
 #
 ###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
-WORKDIR /deployments/
-RUN chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments
-COPY --chown=1001:root build/*-runner /deployments/application
+FROM quay.io/quarkus/quarkus-micro-image:1.0
+WORKDIR /work/
+RUN chown 1001 /work \
+    && chmod "g+rwX" /work \
+    && chown 1001:root /work
+COPY --chown=1001:root build/*-runner /work/application
 
 EXPOSE 8080
 USER 1001
diff --git a/theodolite/src/main/docker/Dockerfile.native-distroless b/theodolite/src/main/docker/Dockerfile.native-distroless
index 1ed64110dd931bf3fea9100e3318318ad40b6966..951dfb64bee56e277d057c8f9e97796e88f30ac2 100644
--- a/theodolite/src/main/docker/Dockerfile.native-distroless
+++ b/theodolite/src/main/docker/Dockerfile.native-distroless
@@ -15,8 +15,7 @@
 #
 ###
 FROM quay.io/quarkus/quarkus-distroless-image:1.0
-WORKDIR /deployments/
-COPY build/*-runner /deployments/application
+COPY build/*-runner /application
 
 EXPOSE 8080
 USER nonroot
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/ConfigMapResourceSet.kt b/theodolite/src/main/kotlin/theodolite/benchmark/ConfigMapResourceSet.kt
index 27e3206ad7b60d61cab94caaef8a3279d834fe65..f85b83497e5d69e43c1d4784ef86170a5436e929 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/ConfigMapResourceSet.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/ConfigMapResourceSet.kt
@@ -12,7 +12,7 @@ import java.lang.IllegalArgumentException
 
 @RegisterForReflection
 @JsonDeserialize
-class ConfigMapResourceSet: ResourceSet, KubernetesResource {
+class ConfigMapResourceSet : ResourceSet, KubernetesResource {
     lateinit var name: String
     lateinit var files: List<String> // load all files, iff files is not set
 
@@ -26,30 +26,35 @@ class ConfigMapResourceSet: ResourceSet, KubernetesResource {
                 .withName(name)
                 .get() ?: throw DeploymentFailedException("Cannot find ConfigMap with name '$name'."))
                 .data
-                .filter { it.key.endsWith(".yaml") }
+                .filter { it.key.endsWith(".yaml") || it.key.endsWith(".yml")}
         } catch (e: KubernetesClientException) {
             throw DeploymentFailedException("Cannot find or read ConfigMap with name '$name'.", e)
         }
 
-        if (::files.isInitialized){
-            resources = resources.filter { files.contains(it.key) }
-
-            if (resources.size != files.size) {
+        if (::files.isInitialized) {
+            val filteredResources = resources.filter { files.contains(it.key) }
+            if (filteredResources.size != files.size) {
                 throw DeploymentFailedException("Could not find all specified Kubernetes manifests files")
             }
+            resources = filteredResources
         }
 
         return try {
             resources
-                .map { Pair(
-                    getKind(resource = it.value),
-                    it) }
+                .map {
+                    Pair(
+                        getKind(resource = it.value),
+                        it
+                    )
+                }
                 .map {
                     Pair(
                         it.second.key,
-                        loader.loadK8sResource(it.first, it.second.value)) }
+                        loader.loadK8sResource(it.first, it.second.value)
+                    )
+                }
         } catch (e: IllegalArgumentException) {
-            throw DeploymentFailedException("Can not create resource set from specified configmap", e)
+            throw DeploymentFailedException("Cannot create resource set from specified ConfigMap", e)
         }
 
     }
@@ -58,10 +63,7 @@ class ConfigMapResourceSet: ResourceSet, KubernetesResource {
         val parser = YamlParserFromString()
         val resourceAsMap = parser.parse(resource, HashMap<String, String>()::class.java)
 
-        return try {
-            resourceAsMap?.get("kind") !!
-        } catch (e: NullPointerException) {
-            throw DeploymentFailedException( "Could not find field kind of Kubernetes resource: ${resourceAsMap?.get("name")}", e)
-        }
+        return resourceAsMap?.get("kind")
+            ?: throw DeploymentFailedException("Could not find field kind of Kubernetes resource: ${resourceAsMap?.get("name")}")
     }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/FileSystemResourceSet.kt b/theodolite/src/main/kotlin/theodolite/benchmark/FileSystemResourceSet.kt
index e769f8b9883b98d9787f2de65571fc94056c3b9c..f830232de4b6956fa0f989cae131903377862e6c 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/FileSystemResourceSet.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/FileSystemResourceSet.kt
@@ -28,7 +28,7 @@ class FileSystemResourceSet: ResourceSet, KubernetesResource {
         return try {
             File(path)
                 .list() !!
-                .filter { it.endsWith(".yaml") } // consider only yaml files, e.g. ignore readme files
+                .filter { it.endsWith(".yaml") || it.endsWith(".yml") }
                 .map {
                     loadSingleResource(resourceURL = it, client = client)
                 }
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
index 70d8b241c84d1c6875c8da3d74cd90b3f57956d6..d42c2ea3c0ed5394fdcf5b89be0fe0470a15ba62 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
@@ -39,7 +39,7 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
     lateinit var name: String
     lateinit var resourceTypes: List<TypeName>
     lateinit var loadTypes: List<TypeName>
-    lateinit var kafkaConfig: KafkaConfig
+    var kafkaConfig: KafkaConfig? = null
     lateinit var infrastructure: Resources
     lateinit var sut: Resources
     lateinit var loadGenerator: Resources
@@ -110,6 +110,9 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
                 patcherFactory.createPatcher(it.patcher, appResources + loadGenResources).patch(override.value)
             }
         }
+
+        val kafkaConfig = this.kafkaConfig
+
         return KubernetesBenchmarkDeployment(
             sutBeforeActions = sut.beforeActions,
             sutAfterActions = sut.afterActions,
@@ -119,8 +122,8 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
             loadGenResources = loadGenResources.map { it.second },
             loadGenerationDelay = loadGenerationDelay,
             afterTeardownDelay = afterTeardownDelay,
-            kafkaConfig = hashMapOf("bootstrap.servers" to kafkaConfig.bootstrapServer),
-            topics = kafkaConfig.topics,
+            kafkaConfig = if (kafkaConfig != null) hashMapOf("bootstrap.servers" to kafkaConfig.bootstrapServer) else mapOf(),
+            topics = kafkaConfig?.topics ?: listOf(),
             client = this.client
         )
     }
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
index 9d32a4eeab656143e10b5057a173e04245d6f22b..3331444a17b4c2a1aa4411c1e27b3d1e087f8841 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
@@ -31,7 +31,7 @@ class KubernetesBenchmarkDeployment(
     val loadGenResources: List<KubernetesResource>,
     private val loadGenerationDelay: Long,
     private val afterTeardownDelay: Long,
-    private val kafkaConfig: HashMap<String, Any>,
+    private val kafkaConfig: Map<String, Any>,
     private val topics: List<KafkaConfig.TopicWrapper>,
     private val client: NamespacedKubernetesClient
 ) : BenchmarkDeployment {
@@ -46,9 +46,12 @@ class KubernetesBenchmarkDeployment(
      *  - Deploy the needed resources.
      */
     override fun setup() {
-        val kafkaTopics = this.topics.filter { !it.removeOnly }
-            .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
-        kafkaController.createTopics(kafkaTopics)
+        if (this.topics.isNotEmpty()) {
+            val kafkaTopics = this.topics
+                .filter { !it.removeOnly }
+                .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
+            kafkaController.createTopics(kafkaTopics)
+        }
         sutBeforeActions.forEach { it.exec(client = client) }
         appResources.forEach { kubernetesManager.deploy(it) }
         logger.info { "Wait ${this.loadGenerationDelay} seconds before starting the load generator." }
@@ -69,7 +72,9 @@ class KubernetesBenchmarkDeployment(
         loadGenAfterActions.forEach { it.exec(client = client) }
         appResources.forEach { kubernetesManager.remove(it) }
         sutAfterActions.forEach { it.exec(client = client) }
-        kafkaController.removeTopics(this.topics.map { topic -> topic.name })
+        if (this.topics.isNotEmpty()) {
+            kafkaController.removeTopics(this.topics.map { topic -> topic.name })
+        }
         ResourceByLabelHandler(client).removePods(
             labelName = LAG_EXPORTER_POD_LABEL_NAME,
             labelValue = LAG_EXPORTER_POD_LABEL_VALUE
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
index 40f5b7ddbbfc9da4514b8a88946d97149b94b390..6dcfb582655ff9295aedd63d8c30cbac7daae2b3 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
@@ -24,7 +24,7 @@ class BenchmarkStateChecker(
         Thread {
             while (running) {
                 updateBenchmarkStatus()
-                Thread.sleep(100 * 1)
+                Thread.sleep(1000)
             }
         }.start()
     }
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
index 2b6f83c76ce6e31f85cdfec1962f9523c3d297b8..5f4180b0b4b58fa94b979c71998314baae63a91b 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
@@ -37,9 +37,9 @@ class TheodoliteController(
      */
     fun run() {
         sleep(5000) // wait until all states are correctly set
+        benchmarkStateChecker.start(true)
         while (true) {
             reconcile()
-            benchmarkStateChecker.start(true)
             sleep(2000)
         }
     }
@@ -98,11 +98,11 @@ class TheodoliteController(
                     }
                 else -> {
                     executionStateHandler.setExecutionState(execution.name, ExecutionState.FAILURE)
-                    logger.warn { "Unexpected execution state, set state to ${ExecutionState.FAILURE.value}" }
+                    logger.warn { "Unexpected execution state, set state to ${ExecutionState.FAILURE.value}." }
                 }
             }
         } catch (e: Exception) {
-                EventCreator().createEvent(
+            EventCreator().createEvent(
                 executionName = execution.name,
                 type = "WARNING",
                 reason = "Execution failed",
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..30ff73b5da3b551119ad085adbc982180e4fc066
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt
@@ -0,0 +1,34 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+
+/**
+ * This patcher is able to set the `spec.selector.matchLabels` for a `Deployment` or `StatefulSet` Kubernetes resource.
+ *
+ * @property k8sResource The Kubernetes resource to patch
+ * @property variableName The key of the matchLabels entry that should be set
+ */
+class MatchLabelPatcher(private val k8sResource: KubernetesResource, val variableName: String) :
+    AbstractPatcher(k8sResource) {
+
+    override fun <String> patch(labelValue: String) {
+        if (labelValue is kotlin.String) {
+            when (k8sResource) {
+                is Deployment -> {
+                    if (k8sResource.spec.selector.matchLabels == null) {
+                        k8sResource.spec.selector.matchLabels = mutableMapOf()
+                    }
+                    k8sResource.spec.selector.matchLabels[this.variableName] = labelValue
+                }
+                is StatefulSet -> {
+                    if (k8sResource.spec.selector.matchLabels == null) {
+                        k8sResource.spec.selector.matchLabels = mutableMapOf()
+                    }
+                    k8sResource.spec.selector.matchLabels[this.variableName] = labelValue
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt b/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
index 88b3e19e999a889cdcb8345ca7c90c37a6e6d275..e92de4dba7de298c9df76600f2c6785f5878103e 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
@@ -79,6 +79,14 @@ class PatcherFactory {
                     k8sResource = resource,
                     variableName = patcherDefinition.properties["variableName"]!!
                 )
+                "MatchLabelPatcher" -> MatchLabelPatcher(
+                    k8sResource = resource,
+                    variableName = patcherDefinition.properties["variableName"]!!
+                )
+                "TemplateLabelPatcher" -> TemplateLabelPatcher(
+                    k8sResource = resource,
+                    variableName = patcherDefinition.properties["variableName"]!!
+                )
                 "ImagePatcher" -> ImagePatcher(
                     k8sResource = resource,
                     container = patcherDefinition.properties["container"]!!
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..a524e5c40f90ccf98dc95003cc33dcfceb6f8598
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt
@@ -0,0 +1,34 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+
+/**
+ * This patcher is able to set the field `spec.template.metadata.labels` for a `Deployment` or `StatefulSet` Kubernetes resource.
+ *
+ * @property k8sResource The Kubernetes resource to patch
+ * @property variableName The key of the template label that should be set
+ */
+class TemplateLabelPatcher(private val k8sResource: KubernetesResource, val variableName: String) :
+    AbstractPatcher(k8sResource) {
+
+    override fun <String> patch(labelValue: String) {
+        if (labelValue is kotlin.String) {
+            when (k8sResource) {
+                is Deployment -> {
+                    if (k8sResource.spec.template.metadata.labels == null) {
+                        k8sResource.spec.template.metadata.labels = mutableMapOf()
+                    }
+                    k8sResource.spec.template.metadata.labels[this.variableName] = labelValue
+                }
+                is StatefulSet -> {
+                    if (k8sResource.spec.template.metadata.labels == null) {
+                        k8sResource.spec.template.metadata.labels = mutableMapOf()
+                    }
+                    k8sResource.spec.template.metadata.labels[this.variableName] = labelValue
+                }
+            }
+        }
+    }
+}
\ No newline at end of file