diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 84edb0c1fd9770e37c9dd8510aef2db017edec49..48d3681dab13b31ade355d9a1f13704cdc2e9c2e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -143,6 +143,7 @@ test-benchmarks:
     - build-benchmarks
   script: ./gradlew test --continue
   artifacts:
+    when: always
     reports:
       junit:
         - "theodolite-benchmarks/**/build/test-results/test/TEST-*.xml"
@@ -204,10 +205,8 @@ spotbugs-benchmarks:
     - changes:
       - theodolite-benchmarks/*
       - theodolite-benchmarks/$JAVA_PROJECT_NAME/**/*
-      - theodolite-benchmarks/kstreams-commons/**/*
-      - theodolite-benchmarks/flink-commons/**/*
-      - theodolite-benchmarks/load-generator-commons/**/*
-      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME"
+      - theodolite-benchmarks/{$JAVA_PROJECT_DEPS}/**/*
+      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME && $JAVA_PROJECT_DEPS"
     - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME"
       when: manual
       allow_failure: true
@@ -217,72 +216,140 @@ deploy-uc1-kstreams:
   variables:
     IMAGE_NAME: "theodolite-uc1-kstreams-app"
     JAVA_PROJECT_NAME: "uc1-kstreams"
+    JAVA_PROJECT_DEPS: "kstreams-commons"
 
 deploy-uc2-kstreams:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc2-kstreams-app"
     JAVA_PROJECT_NAME: "uc2-kstreams"
+    JAVA_PROJECT_DEPS: "kstreams-commons"
 
 deploy-uc3-kstreams:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc3-kstreams-app"
     JAVA_PROJECT_NAME: "uc3-kstreams"
+    JAVA_PROJECT_DEPS: "kstreams-commons"
 
 deploy-uc4-kstreams:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc4-kstreams-app"
     JAVA_PROJECT_NAME: "uc4-kstreams"
+    JAVA_PROJECT_DEPS: "kstreams-commons"
 
 deploy-uc1-flink:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc1-flink"
     JAVA_PROJECT_NAME: "uc1-flink"
+    JAVA_PROJECT_DEPS: "flink-commons"
 
 deploy-uc2-flink:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc2-flink"
     JAVA_PROJECT_NAME: "uc2-flink"
+    JAVA_PROJECT_DEPS: "flink-commons"
 
 deploy-uc3-flink:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc3-flink"
     JAVA_PROJECT_NAME: "uc3-flink"
+    JAVA_PROJECT_DEPS: "flink-commons"
 
 deploy-uc4-flink:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc4-flink"
     JAVA_PROJECT_NAME: "uc4-flink"
+    JAVA_PROJECT_DEPS: "flink-commons"
+
+deploy-uc1-beam-flink:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc1-beam-flink"
+    JAVA_PROJECT_NAME: "uc1-beam-flink"
+    JAVA_PROJECT_DEPS: "beam-commons,uc1-beam"
+
+deploy-uc2-beam-flink:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc2-beam-flink"
+    JAVA_PROJECT_NAME: "uc2-beam-flink"
+    JAVA_PROJECT_DEPS: "beam-commons,uc2-beam"
+
+deploy-uc3-beam-flink:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc3-beam-flink"
+    JAVA_PROJECT_NAME: "uc3-beam-flink"
+    JAVA_PROJECT_DEPS: "beam-commons,uc3-beam"
+
+deploy-uc4-beam-flink:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc4-beam-flink"
+    JAVA_PROJECT_NAME: "uc4-beam-flink"
+    JAVA_PROJECT_DEPS: "beam-commons,uc4-beam"
+
+deploy-uc1-beam-samza:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc1-beam-samza"
+    JAVA_PROJECT_NAME: "uc1-beam-samza"
+    JAVA_PROJECT_DEPS: "beam-commons,uc1-beam"
+
+deploy-uc2-beam-samza:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc2-beam-samza"
+    JAVA_PROJECT_NAME: "uc2-beam-samza"
+    JAVA_PROJECT_DEPS: "beam-commons,uc2-beam"
+
+deploy-uc3-beam-samza:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc3-beam-samza"
+    JAVA_PROJECT_NAME: "uc3-beam-samza"
+    JAVA_PROJECT_DEPS: "beam-commons,uc3-beam"
+
+deploy-uc4-beam-samza:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc4-beam-samza"
+    JAVA_PROJECT_NAME: "uc4-beam-samza"
+    JAVA_PROJECT_DEPS: "beam-commons,uc4-beam"
 
 deploy-uc1-load-generator:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc1-workload-generator"
     JAVA_PROJECT_NAME: "uc1-load-generator"
+    JAVA_PROJECT_DEPS: "load-generator-commons"
 
 deploy-uc2-load-generator:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc2-workload-generator"
     JAVA_PROJECT_NAME: "uc2-load-generator"
+    JAVA_PROJECT_DEPS: "load-generator-commons"
 
 deploy-uc3-load-generator:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc3-workload-generator"
     JAVA_PROJECT_NAME: "uc3-load-generator"
+    JAVA_PROJECT_DEPS: "load-generator-commons"
 
 deploy-uc4-load-generator:
   extends: .deploy-benchmarks
   variables:
     IMAGE_NAME: "theodolite-uc4-workload-generator"
     JAVA_PROJECT_NAME: "uc4-load-generator"
+    JAVA_PROJECT_DEPS: "load-generator-commons"
       
 
 # Theodolite Framework
@@ -330,6 +397,7 @@ test-theodolite:
     #- build-theodolite-native
   script: ./gradlew test --stacktrace
   artifacts:
+    when: always
     reports:
       junit:
         - "theodolite/**/build/test-results/test/TEST-*.xml"
@@ -408,6 +476,22 @@ test-slo-checker-dropped-records-kstreams:
     - when: manual
       allow_failure: true
 
+test-slo-checker-generic:
+  stage: test
+  needs: []
+  image: python:3.7-slim
+  before_script:
+    - cd slo-checker/generic
+  script:
+    - pip install -r requirements.txt
+    - cd app
+    - python -m unittest
+  rules:
+    - changes:
+      - slo-checker/generic/**/*
+    - when: manual
+      allow_failure: true
+
 deploy-slo-checker-lag-trend:
   stage: deploy
   extends:
@@ -444,6 +528,24 @@ deploy-slo-checker-dropped-records-kstreams:
       when: manual
       allow_failure: true
 
+deploy-slo-checker-generic:
+  stage: deploy
+  extends:
+    - .kaniko-push
+  needs:
+    - test-slo-checker-generic
+  before_script:
+    - cd slo-checker/generic
+  variables:
+    IMAGE_NAME: theodolite-slo-checker-generic
+  rules:
+    - changes:
+      - slo-checker/generic/**/*
+      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW"
+    - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW"
+      when: manual
+      allow_failure: true
+
 
 # Theodolite Random Scheduler
 
diff --git a/binder/requirements.txt b/binder/requirements.txt
new file mode 120000
index 0000000000000000000000000000000000000000..6de15663a8c83876719aa07d6cb09b5a7b71df21
--- /dev/null
+++ b/binder/requirements.txt
@@ -0,0 +1 @@
+../analysis/requirements.txt
\ No newline at end of file
diff --git a/docs/_config.yml b/docs/_config.yml
index a6c6eb709d1a2b904421cee05e9d22fe94d2005a..0d2a1aa774a83347c80b538a97d5dbfa1b7639b3 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -4,7 +4,6 @@ description: >-
   scalability of cloud-native applications.
 
 remote_theme: pmarsceill/just-the-docs
-#color_scheme: "dark"
 aux_links:
     "Theodolite on GitHub":
       - "//github.com/cau-se/theodolite"
@@ -14,4 +13,5 @@ exclude:
   - Gemfile
   - Gemfile.lock
   - README.md
-  - vendor
+  - vendor/
+  - drafts/
diff --git a/docs/api-reference/crds.md b/docs/api-reference/crds.md
index a8ce1d8d0ad2827fe07e0eee0a60a0cd2c71c93c..a91e991609f5fe10e90793f34f2ad04c6c5576d0 100644
--- a/docs/api-reference/crds.md
+++ b/docs/api-reference/crds.md
@@ -257,6 +257,24 @@ The loadGenResourceSets specifies all Kubernetes resources required to start the
         </tr>
     </thead>
     <tbody><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorafteractionsindex">afterActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          Load generator after actions are executed after the teardown of the load generator.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorbeforeactionsindex">beforeActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          Load generator before actions are executed before the load generator is started.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
         <td><b><a href="#benchmarkspecloadgeneratorresourcesindex">resources</a></b></td>
         <td>[]object</td>
         <td>
@@ -269,6 +287,272 @@ The loadGenResourceSets specifies all Kubernetes resources required to start the
 </table>
 
 
+### benchmark.spec.loadGenerator.afterActions[index]
+<sup><sup>[↩ Parent](#benchmarkspecloadgenerator)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorafteractionsindexexec">exec</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies command to be executed.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorafteractionsindexselector">selector</a></b></td>
+        <td>object</td>
+        <td>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.afterActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorafteractionsindex)</sup></sup>
+
+
+
+Specifies command to be executed.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.afterActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorafteractionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorafteractionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.afterActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorafteractionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.beforeActions[index]
+<sup><sup>[↩ Parent](#benchmarkspecloadgenerator)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorbeforeactionsindexexec">exec</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies command to be executed.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorbeforeactionsindexselector">selector</a></b></td>
+        <td>object</td>
+        <td>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.beforeActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorbeforeactionsindex)</sup></sup>
+
+
+
+Specifies command to be executed.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.beforeActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorbeforeactionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecloadgeneratorbeforeactionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.loadGenerator.beforeActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecloadgeneratorbeforeactionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
 ### benchmark.spec.loadGenerator.resources[index]
 <sup><sup>[↩ Parent](#benchmarkspecloadgenerator)</sup></sup>
 
@@ -550,6 +834,24 @@ The appResourceSets specifies all Kubernetes resources required to start the sut
         </tr>
     </thead>
     <tbody><tr>
+        <td><b><a href="#benchmarkspecsutafteractionsindex">afterActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          SUT after actions are executed after the teardown of the SUT.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecsutbeforeactionsindex">beforeActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          SUT before actions are executed before the SUT is started.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
         <td><b><a href="#benchmarkspecsutresourcesindex">resources</a></b></td>
         <td>[]object</td>
         <td>
@@ -562,7 +864,7 @@ The appResourceSets specifies all Kubernetes resources required to start the sut
 </table>
 
 
-### benchmark.spec.sut.resources[index]
+### benchmark.spec.sut.afterActions[index]
 <sup><sup>[↩ Parent](#benchmarkspecsut)</sup></sup>
 
 
@@ -579,29 +881,29 @@ The appResourceSets specifies all Kubernetes resources required to start the sut
         </tr>
     </thead>
     <tbody><tr>
-        <td><b><a href="#benchmarkspecsutresourcesindexconfigmap">configMap</a></b></td>
+        <td><b><a href="#benchmarkspecsutafteractionsindexexec">exec</a></b></td>
         <td>object</td>
         <td>
-          The configMap resourceSet loads the Kubernetes manifests from an Kubernetes configMap.<br/>
+          Specifies command to be executed.<br/>
         </td>
         <td>false</td>
       </tr><tr>
-        <td><b><a href="#benchmarkspecsutresourcesindexfilesystem">fileSystem</a></b></td>
+        <td><b><a href="#benchmarkspecsutafteractionsindexselector">selector</a></b></td>
         <td>object</td>
         <td>
-          The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.<br/>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
         </td>
         <td>false</td>
       </tr></tbody>
 </table>
 
 
-### benchmark.spec.sut.resources[index].configMap
-<sup><sup>[↩ Parent](#benchmarkspecsutresourcesindex)</sup></sup>
+### benchmark.spec.sut.afterActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecsutafteractionsindex)</sup></sup>
 
 
 
-The configMap resourceSet loads the Kubernetes manifests from an Kubernetes configMap.
+Specifies command to be executed.
 
 <table>
     <thead>
@@ -613,7 +915,273 @@ The configMap resourceSet loads the Kubernetes manifests from an Kubernetes conf
         </tr>
     </thead>
     <tbody><tr>
-        <td><b>files</b></td>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.afterActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecsutafteractionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecsutafteractionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.afterActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecsutafteractionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.beforeActions[index]
+<sup><sup>[↩ Parent](#benchmarkspecsut)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecsutbeforeactionsindexexec">exec</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies command to be executed.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecsutbeforeactionsindexselector">selector</a></b></td>
+        <td>object</td>
+        <td>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.beforeActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecsutbeforeactionsindex)</sup></sup>
+
+
+
+Specifies command to be executed.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.beforeActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecsutbeforeactionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecsutbeforeactionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.beforeActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecsutbeforeactionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.resources[index]
+<sup><sup>[↩ Parent](#benchmarkspecsut)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecsutresourcesindexconfigmap">configMap</a></b></td>
+        <td>object</td>
+        <td>
+          The configMap resourceSet loads the Kubernetes manifests from a Kubernetes configMap.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecsutresourcesindexfilesystem">fileSystem</a></b></td>
+        <td>object</td>
+        <td>
+          The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.sut.resources[index].configMap
+<sup><sup>[↩ Parent](#benchmarkspecsutresourcesindex)</sup></sup>
+
+
+
+The configMap resourceSet loads the Kubernetes manifests from a Kubernetes configMap.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>files</b></td>
         <td>[]string</td>
         <td>
           (Optional) Specifies which files from the configMap should be loaded. If this field is not set, all files are loaded.<br/>
@@ -681,6 +1249,24 @@ The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.
         </tr>
     </thead>
     <tbody><tr>
+        <td><b><a href="#benchmarkspecinfrastructureafteractionsindex">afterActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          Infrastructure after actions are executed after the teardown of the infrastructure.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecinfrastructurebeforeactionsindex">beforeActions</a></b></td>
+        <td>[]object</td>
+        <td>
+          Infrastructure before actions are executed before the infrastructure is set up.<br/>
+          <br/>
+            <i>Default</i>: []<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
         <td><b><a href="#benchmarkspecinfrastructureresourcesindex">resources</a></b></td>
         <td>[]object</td>
         <td>
@@ -693,6 +1279,272 @@ The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.
 </table>
 
 
+### benchmark.spec.infrastructure.afterActions[index]
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructure)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecinfrastructureafteractionsindexexec">exec</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies command to be executed.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecinfrastructureafteractionsindexselector">selector</a></b></td>
+        <td>object</td>
+        <td>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.afterActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructureafteractionsindex)</sup></sup>
+
+
+
+Specifies command to be executed.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.afterActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructureafteractionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecinfrastructureafteractionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.afterActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructureafteractionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.beforeActions[index]
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructure)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b><a href="#benchmarkspecinfrastructurebeforeactionsindexexec">exec</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies command to be executed.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecinfrastructurebeforeactionsindexselector">selector</a></b></td>
+        <td>object</td>
+        <td>
+          The selector specifies which resource should be selected for the execution of the command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.beforeActions[index].exec
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructurebeforeactionsindex)</sup></sup>
+
+
+
+Specifies command to be executed.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>command</b></td>
+        <td>[]string</td>
+        <td>
+          The command to be executed as string array.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>timeoutSeconds</b></td>
+        <td>integer</td>
+        <td>
+          Specifies the timeout (in seconds) for the specified command.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.beforeActions[index].selector
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructurebeforeactionsindex)</sup></sup>
+
+
+
+The selector specifies which resource should be selected for the execution of the command.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>container</b></td>
+        <td>string</td>
+        <td>
+          Specifies the container.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspecinfrastructurebeforeactionsindexselectorpod">pod</a></b></td>
+        <td>object</td>
+        <td>
+          Specifies the pod.<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.infrastructure.beforeActions[index].selector.pod
+<sup><sup>[↩ Parent](#benchmarkspecinfrastructurebeforeactionsindexselector)</sup></sup>
+
+
+
+Specifies the pod.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>matchLabels</b></td>
+        <td>map[string]string</td>
+        <td>
+          The matchLabels of the desired pod.<br/>
+          <br/>
+            <i>Default</i>: map[]<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
 ### benchmark.spec.infrastructure.resources[index]
 <sup><sup>[↩ Parent](#benchmarkspecinfrastructure)</sup></sup>
 
diff --git a/docs/creating-an-execution.md b/docs/creating-an-execution.md
index e70893e7ea4364bfbb30465df95273703ec7f43b..263d630ff2db82927c72d2c2482fcddc09705bfc 100644
--- a/docs/creating-an-execution.md
+++ b/docs/creating-an-execution.md
@@ -58,7 +58,29 @@ As a Benchmark may define multiple supported load and resource types, an Executi
 ## Definition of SLOs
 
 SLOs provide a way to quantify whether a certain load intensity can be handled by a certain amount of provisioned resources.
-An Execution must at least specify one SLO to be checked.
+In Theodolite, SLOs are evaluated by requesting monitoring data from Prometheus and analyzing it in a benchmark-specific way.
+An Execution must at least define one SLO to be checked.
+
+A good choice to get started is defining an SLO of type `generic`:
+
+```yaml
+- sloType: "generic"
+  prometheusUrl: "http://prometheus-operated:9090"
+  offset: 0
+  properties:
+    externalSloUrl: "http://localhost:8082"
+    promQLQuery: "sum by(job) (kafka_streams_stream_task_metrics_dropped_records_total>=0)"
+    warmup: 60 # in seconds
+    queryAggregation: max
+    repetitionAggregation: median
+    operator: lte
+    threshold: 1000
+```
+
+All you have to do is to define a [PromQL query](https://prometheus.io/docs/prometheus/latest/querying/basics/) describing which metrics should be requested (`promQLQuery`) and how the resulting time series should be evaluated. With `queryAggregation` you specify how the resulting time series is aggregated to a single value and `repetitionAggregation` describes how the results of multiple repetitions are aggregated. Possible values are
+`mean`, `median`, `mode`, `sum`, `count`, `max`, `min`, `std`, `var`, `skew`, `kurt` as well as percentiles such as `p99` or `p99.9`. The result of aggregating all repetitions is checked against `threshold`. This check is performed using an `operator`, which requires the result to be "less than" (`lt`), "less than or equal" (`lte`), "greater than" (`gt`) or "greater than or equal" (`gte`) to the threshold.
+
+In case you need to evaluate monitoring data in a more flexible fashion, you can also change the value of `externalSloUrl` to your custom SLO checker. Have a look at the source code of the [generic SLO checker](https://github.com/cau-se/theodolite/tree/master/slo-checker/generic) to get started.
 
 ## Experimental Setup
 
@@ -72,7 +94,7 @@ The experimental setup can be configured by:
 
 ## Configuration Overrides
 
-In cases where only small modifications of a system under test should be benchmarked, it is not necessarily required to [create a new benchmark](creating-a-benchmark).
+In cases where only small modifications of a system under test should be benchmarked, it is not necessary to [create a new benchmark](creating-a-benchmark).
 Instead, also Executions allow to do small reconfigurations, such as switching on or off a specific Pod scheduler.
 
 This is done by defining `configOverrides` in the Execution. Each override consists of a patcher, defining which Kubernetes resource should be patched in which way, and a value the patcher is applied with.
diff --git a/docs/drafts/actions.md b/docs/drafts/actions.md
new file mode 100644
index 0000000000000000000000000000000000000000..8092fddb088b3fe8fc64f51bff03bb0c6504b74f
--- /dev/null
+++ b/docs/drafts/actions.md
@@ -0,0 +1,62 @@
+## Infrastructure
+The necessary infrastructure for an execution can be defined in the benchmark manifests. The related resources are created *before* an execution is started, and removed *after* an execution is finished.
+
+### Example
+
+```yaml
+  infrastructure:
+    resources:
+      - configMap:
+          name: "example-configmap"
+          files:
+            - "uc1-kstreams-deployment.yaml"
+```
+
+## Action Commands
+Theodolite allows executing commands on running pods (similar to the `kubectl exec -it <pod-name> -- <command>` command). These commands can be run either before (via so-called `beforeActions`) or after (via so-called `afterActions`) an experiment is executed.
+
+Theodolite checks if all required pods are available for the specified actions (i.e. the pods must either be defined as infrastructure or already deployed in the cluster). If not all pods/resources are available, the benchmark will not be set as `Ready`. Consequently, an action cannot be executed on a pod that is defined as an SUT or loadGen resource.
+
+### Example
+
+```yaml
+# For the system under test
+  sut:
+    resources: ...
+    beforeActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: busybox1
+        exec:
+          command: ["touch", "test-file-sut"]
+          timeoutSeconds: 90
+    afterActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: busybox1
+        exec:
+          command: [ "touch", "test-file-sut-after" ]
+          timeoutSeconds: 90
+
+# analog, for the load generator
+  loadGenerator:
+    resources: ... 
+    beforeActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: busybox1
+        exec:
+          command: ["touch", "test-file-loadGen"]
+          timeoutSeconds: 90
+    afterActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: busybox1
+        exec:
+          command: [ "touch", "test-file-loadGen-after" ]
+          timeoutSeconds: 90
+```
\ No newline at end of file
diff --git a/docs/running-benchmarks.md b/docs/running-benchmarks.md
index eda817d28b6a10b2e2f33e6986a3b018e089beff..7da1c7e5f8385a2818ae587b4c3ab3715a6c2bb2 100644
--- a/docs/running-benchmarks.md
+++ b/docs/running-benchmarks.md
@@ -11,6 +11,7 @@ Running scalability benchmarks with Theodolite involves the following steps:
 1. [Deploying a benchmark to Kubernetes](#deploying-a-benchmark)
 1. [Creating an execution](#creating-an-execution), which describes the experimental setup for running the benchmark
 1. [Accessing benchmark results](#accessing-benchmark-results)
+1. [Analyzing benchmark results](#analyzing-benchmark-results) with Theodolite's Jupyter notebooks
 
 
 ## Deploying a Benchmark
@@ -131,3 +132,32 @@ For installations without persistence, but also as an alternative for installati
 ```sh
 kubectl cp $(kubectl get pod -l app=theodolite -o jsonpath="{.items[0].metadata.name}"):/results . -c results-access
 ```
+
+## Analyzing Benchmark Results
+
+Theodolite comes with Jupyter notebooks for analyzing and visualizing benchmark execution results.
+The easiest way to use them is at MyBinder:
+
+[Launch Notebooks](https://mybinder.org/v2/gh/cau-se/theodolite/HEAD?labpath=analysis){: .btn .btn-primary }
+{: .text-center }
+
+Alternatively, you can also [run these notebooks locally](https://github.com/cau-se/theodolite/tree/master/analysis), for example, with Docker or Visual Studio Code.
+
+The notebooks allow to compute a scalability function using its *demand* metric and to visualize multiple such functions in plots:
+
+### Computing the *demand* metric with `demand-metric.ipynb` (optional)
+
+After finishing a benchmark execution, Theodolite creates an `exp<id>_demand.csv` file. It maps the tested load intensities to the minimal required resources for that load. If the monitoring data collected during benchmark execution should be analyzed in more detail, the `demand-metric.ipynb` notebook can be used.
+
+Theodolite stores monitoring data for each conducted SLO experiment in `exp<id>_<load>_<resources>_<slo-slug>_<rep>.csv` files, where `<id>` is the ID of an execution, `<load>` the corresponding load intensity value, `<resources>` the resources value, `<slo-slug>` the [name of the SLO](creating-an-execution.html#definition-of-slos) and `<rep>` the repetition counter.
+The `demand-metric.ipynb` notebook reads these files and generates a new CSV file mapping load intensities to the minimal required resources. The format of this file corresponds to the original `exp<id>_demand.csv` file created when running the benchmark, but allows, for example, to evaluate different warm-up periods.
+
+Currently, the `demand-metric.ipynb` notebook only supports benchmarks with the *lag trend SLO* out-of-the-box, but can easily be adjusted to perform any other type of analysis.
+
+### Plotting benchmark results with the *demand* metric with `demand-metric-plot.ipynb`
+
+The `demand-metric-plot.ipynb` takes one or more `exp<id>_demand.csv` files as input and visualizes them together in a plot.
+Input files can either be taken directly from Theodolite, or created from the `demand-metric.ipynb` notebooks.
+
+All plotting code is only intended to serve as a template. Adjust it to change colors, labels, formatting, etc. as needed.
+Please refer to the official docs of [MatPlotLib](https://matplotlib.org/) and the [ggplot](https://matplotlib.org/stable/gallery/style_sheets/ggplot.html) style, which are used to generate the plots.
diff --git a/helm/preconfigs/minimal.yaml b/helm/preconfigs/minimal.yaml
index b0828c2f424e8456933dc626a66a199cd60aa5da..80a83f06cc9838e01f812e730932b9b79d947161 100644
--- a/helm/preconfigs/minimal.yaml
+++ b/helm/preconfigs/minimal.yaml
@@ -8,5 +8,8 @@ cp-helm-charts:
       offsets.topic.replication.factor: "1"
 
 operator:
+  sloChecker:
+    droppedRecordsKStreams:
+      enabled: false
   resultsVolume:
     enabled: false
diff --git a/helm/templates/theodolite/theodolite-operator.yaml b/helm/templates/theodolite/theodolite-operator.yaml
index c7ced880cbbfbb9795ef59156ea1df7d5b860ec6..ff9c7e4de87c703af3350f7d9c797a5a53e2e675 100644
--- a/helm/templates/theodolite/theodolite-operator.yaml
+++ b/helm/templates/theodolite/theodolite-operator.yaml
@@ -31,6 +31,19 @@ spec:
           volumeMounts:
             - name: theodolite-results-volume
               mountPath: "/deployments/results"
+        {{- if .Values.operator.sloChecker.generic.enabled }}
+        - name: slo-checker-generic
+          image: "{{ .Values.operator.sloChecker.generic.image }}:{{ .Values.operator.sloChecker.generic.imageTag }}"
+          imagePullPolicy: "{{ .Values.operator.sloChecker.generic.imagePullPolicy }}"
+          ports:
+          - containerPort: 8082
+            name: analysis
+          env:
+          - name: PORT
+            value: "8082"
+          - name: LOG_LEVEL
+            value: INFO
+        {{- end }}
         {{- if .Values.operator.sloChecker.lagTrend.enabled }}
         - name: lag-trend-slo-checker
           image: "{{ .Values.operator.sloChecker.lagTrend.image }}:{{ .Values.operator.sloChecker.lagTrend.imageTag }}"
diff --git a/helm/values.yaml b/helm/values.yaml
index 1e57b42c485eb20a5525f25cfc0ef616e65a325c..ba58b040974886518ab111d668cb0db1140b2eb8 100644
--- a/helm/values.yaml
+++ b/helm/values.yaml
@@ -256,6 +256,11 @@ operator:
   nodeSelector: {}
 
   sloChecker:
+    generic:
+      enabled: true
+      image: ghcr.io/cau-se/theodolite-slo-checker-generic
+      imageTag: latest
+      imagePullPolicy: Always
     lagTrend:
       enabled: true
       image: ghcr.io/cau-se/theodolite-slo-checker-lag-trend
diff --git a/slo-checker/generic/Dockerfile b/slo-checker/generic/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..032b8153a6989ca04631ba553289dacb3620a38d
--- /dev/null
+++ b/slo-checker/generic/Dockerfile
@@ -0,0 +1,6 @@
+FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+
+COPY requirements.txt requirements.txt
+RUN pip install -r requirements.txt
+
+COPY ./app /app
\ No newline at end of file
diff --git a/slo-checker/generic/README.md b/slo-checker/generic/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1a1358a06dc4165c678bca8745dd40473a7c5880
--- /dev/null
+++ b/slo-checker/generic/README.md
@@ -0,0 +1,89 @@
+# Generic SLO Evaluator
+
+## Execution
+
+For development:
+
+```sh
+uvicorn main:app --reload
+```
+
+## Build the docker image:
+
+```sh
+docker build . -t theodolite-evaluator
+```
+
+Run the Docker image:
+
+```sh
+docker run -p 80:80 theodolite-evaluator
+```
+
+## Configuration
+
+You can set the `HOST` and the `PORT` (and many more parameters) via environment variables. Default is `0.0.0.0:80`.
+For more information see the [Gunicorn/FastAPI Docker docs](https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker#advanced-usage).
+
+## API Documentation
+
+The running webserver provides a REST API with the following route:
+
+* /
+  * Method: POST
+  * Body:
+    * results
+      * metric-metadata
+      * values
+    * metadata
+      * warmup
+      * queryAggregation
+      * repetitionAggregation
+      * operator
+      * threshold
+
+The body of the request must be a JSON string that satisfies the following conditions:
+
+* **dropped records**: This property is based on the [Range Vector type](https://www.prometheus.io/docs/prometheus/latest/querying/api/#range-vectors) from Prometheus and must have the following JSON *structure*:
+
+    ```json
+    {
+        "results": [
+            [
+                {
+                    "metric": {
+                        "<label-name>": "<label-value>"
+                    },
+                    "values": [
+                        [
+                            <unix_timestamp>, // 1.634624989695E9
+                            "<sample_value>" // integer
+                        ]
+                    ]
+                }
+            ]
+        ],
+        "metadata": {
+            "warmup": 60,
+            "queryAggregation": "max",
+            "repetitionAggregation": "median",
+            "operator": "lt",
+            "threshold": 2000000
+        }
+    }
+    ```
+
+### description
+
+* results:
+  * metric-metadata:
+    * Labels of this metric. The `generic` slo checker does not use labels in the calculation of the service level objective.
+  * results
+    * The `<unix_timestamp>` provided as the first element of each element in the "values" array must be the timestamp of the measurement value in seconds (with optional decimal precision)
+    * The `<sample_value>` must be the measurement value as string.
+* metadata: Metadata required for the calculation of the service level objective.
+  * **warmup**: Specifies the warmup time in seconds that are ignored for evaluating the SLO.
+  * **queryAggregation**: Specifies the function used to aggregate a query. 
+  * **repetitionAggregation**: Specifies the function used to aggregate the results of multiple query aggregations.
+  * **operator**: Specifies how the result should be checked against a threshold. Possible values are `lt`, `lte`, `gt` and `gte`.
+  * **threshold**: Must be an unsigned integer that specifies the threshold for the SLO evaluation.
diff --git a/slo-checker/generic/app/main.py b/slo-checker/generic/app/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..f36c8739da00128ad94feb1f2d7871df7e2ff137
--- /dev/null
+++ b/slo-checker/generic/app/main.py
@@ -0,0 +1,72 @@
+from fastapi import FastAPI,Request
+import logging
+import os
+import json
+import sys
+import re
+import pandas as pd
+
+
+app = FastAPI()
+
+logging.basicConfig(stream=sys.stdout,
+                    format="%(asctime)s %(levelname)s %(name)s: %(message)s")
+logger = logging.getLogger("API")
+
+
+if os.getenv('LOG_LEVEL') == 'INFO':
+    logger.setLevel(logging.INFO)
+elif os.getenv('LOG_LEVEL') == 'WARNING':
+    logger.setLevel(logging.WARNING)
+elif os.getenv('LOG_LEVEL') == 'DEBUG':
+    logger.setLevel(logging.DEBUG)
+
+
+def get_aggr_func(func_string: str):
+    if func_string in ['mean', 'median', 'mode', 'sum', 'count', 'max', 'min', 'std', 'var', 'skew', 'kurt']:
+        return func_string
+    elif re.search(r'^p\d\d?(\.\d+)?$', func_string): # matches strings like 'p99', 'p99.99', 'p1', 'p0.001'
+        def percentile(x):
+            return x.quantile(float(func_string[1:]) / 100)
+        percentile.__name__ = func_string
+        return percentile
+    else:
+        raise ValueError('Invalid function string.')
+
+def aggr_query(values: dict, warmup: int, aggr_func):
+    df = pd.DataFrame.from_dict(values)
+    df.columns = ['timestamp', 'value']
+    filtered = df[df['timestamp'] >= (df['timestamp'][0] + warmup)]
+    filtered['value'] = filtered['value'].astype(int)
+    return filtered['value'].aggregate(aggr_func)
+
+def check_result(result, operator: str, threshold):
+    if operator == 'lt':
+        return result < threshold
+    if operator == 'lte':
+        return result <= threshold
+    if operator == 'gt':
+        return result > threshold
+    if operator == 'gte':
+        return result >= threshold
+    else:
+        raise ValueError('Invalid operator string.')
+
+
+
+@app.post("/",response_model=bool)
+async def check_slo(request: Request):
+    data = json.loads(await request.body())
+    logger.info('Received request with metadata: %s', data['metadata'])
+
+    warmup = int(data['metadata']['warmup'])
+    query_aggregation = get_aggr_func(data['metadata']['queryAggregation'])
+    rep_aggregation = get_aggr_func(data['metadata']['repetitionAggregation'])
+    operator = data['metadata']['operator']
+    threshold = int(data['metadata']['threshold'])
+
+    query_results = [aggr_query(r[0]["values"], warmup, query_aggregation) for r in data["results"]]
+    result = pd.DataFrame(query_results).aggregate(rep_aggregation).at[0]
+    return check_result(result, operator, threshold)
+
+logger.info("SLO evaluator is online")
\ No newline at end of file
diff --git a/slo-checker/generic/app/test.py b/slo-checker/generic/app/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..2609225ddc9e6e96cdcd01db197cebbdd6501102
--- /dev/null
+++ b/slo-checker/generic/app/test.py
@@ -0,0 +1,56 @@
+import unittest
+from main import app, get_aggr_func, check_result
+import json
+from fastapi.testclient import TestClient
+
+class TestSloEvaluation(unittest.TestCase):
+    client = TestClient(app)
+
+    def test_1_rep(self):
+        with open('../resources/test-1-rep-success.json') as json_file:
+            data = json.load(json_file)
+            response = self.client.post("/", json=data)
+            self.assertEqual(response.json(), True)
+
+    def test_get_aggr_func_median(self):
+        self.assertEqual(get_aggr_func('median'), 'median')
+    
+    def test_get_aggr_func_p99(self):
+        self.assertTrue(callable(get_aggr_func('p99')))
+
+    def test_get_aggr_func_p99_9(self):
+        self.assertTrue(callable(get_aggr_func('p99.9')))
+
+    def test_get_aggr_func_p99_99(self):
+        self.assertTrue(callable(get_aggr_func('p99.99')))
+
+    def test_get_aggr_func_p0_1(self):
+        self.assertTrue(callable(get_aggr_func('p0.1')))
+
+    def test_get_aggr_func_p99_(self):
+        self.assertRaises(ValueError, get_aggr_func, 'p99.')
+
+    def test_get_aggr_func_q99(self):
+        self.assertRaises(ValueError, get_aggr_func, 'q99')
+
+    def test_get_aggr_func_mux(self):
+        self.assertRaises(ValueError, get_aggr_func, 'mux')
+    
+    def test_check_result_lt(self):
+        self.assertEqual(check_result(100, 'lt', 200), True)
+        
+    def test_check_result_lte(self):
+        self.assertEqual(check_result(200, 'lte', 200), True)
+    
+    def test_check_result_gt(self):
+        self.assertEqual(check_result(100, 'gt', 200), False)
+
+    def test_check_result_gte(self):
+        self.assertEqual(check_result(300, 'gte', 200), True)
+
+    def test_check_result_invalid(self):
+        self.assertRaises(ValueError, check_result, 100, 'xyz', 200)
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/slo-checker/generic/requirements.txt b/slo-checker/generic/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..87972ab01a276cbb63033e214e1ad53d38b5c8d8
--- /dev/null
+++ b/slo-checker/generic/requirements.txt
@@ -0,0 +1,4 @@
+fastapi==0.65.2
+pandas==1.0.3
+uvicorn
+requests
diff --git a/slo-checker/generic/resources/test-1-rep-success.json b/slo-checker/generic/resources/test-1-rep-success.json
new file mode 100644
index 0000000000000000000000000000000000000000..b70f461cf620d8eee8c4d9d93feb46db7498626f
--- /dev/null
+++ b/slo-checker/generic/resources/test-1-rep-success.json
@@ -0,0 +1,276 @@
+{
+    "results": [
+        [
+            {
+                "metric": {
+                    "job": "titan-ccp-aggregation"
+                },
+                "values": [
+                    [
+                        1.634624674695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624679695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624684695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624689695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624694695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624699695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624704695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624709695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624714695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624719695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624724695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624729695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624734695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624739695E9,
+                        "0"
+                    ],
+                    [
+                        1.634624744695E9,
+                        "1"
+                    ],
+                    [
+                        1.634624749695E9,
+                        "3"
+                    ],
+                    [
+                        1.634624754695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624759695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624764695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624769695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624774695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624779695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624784695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624789695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624794695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624799695E9,
+                        "4"
+                    ],
+                    [
+                        1.634624804695E9,
+                        "176"
+                    ],
+                    [
+                        1.634624809695E9,
+                        "176"
+                    ],
+                    [
+                        1.634624814695E9,
+                        "176"
+                    ],
+                    [
+                        1.634624819695E9,
+                        "176"
+                    ],
+                    [
+                        1.634624824695E9,
+                        "176"
+                    ],
+                    [
+                        1.634624829695E9,
+                        "159524"
+                    ],
+                    [
+                        1.634624834695E9,
+                        "209870"
+                    ],
+                    [
+                        1.634624839695E9,
+                        "278597"
+                    ],
+                    [
+                        1.634624844695E9,
+                        "460761"
+                    ],
+                    [
+                        1.634624849695E9,
+                        "460761"
+                    ],
+                    [
+                        1.634624854695E9,
+                        "460761"
+                    ],
+                    [
+                        1.634624859695E9,
+                        "460761"
+                    ],
+                    [
+                        1.634624864695E9,
+                        "460761"
+                    ],
+                    [
+                        1.634624869695E9,
+                        "606893"
+                    ],
+                    [
+                        1.634624874695E9,
+                        "653534"
+                    ],
+                    [
+                        1.634624879695E9,
+                        "755796"
+                    ],
+                    [
+                        1.634624884695E9,
+                        "919317"
+                    ],
+                    [
+                        1.634624889695E9,
+                        "919317"
+                    ],
+                    [
+                        1.634624894695E9,
+                        "955926"
+                    ],
+                    [
+                        1.634624899695E9,
+                        "955926"
+                    ],
+                    [
+                        1.634624904695E9,
+                        "955926"
+                    ],
+                    [
+                        1.634624909695E9,
+                        "955926"
+                    ],
+                    [
+                        1.634624914695E9,
+                        "955926"
+                    ],
+                    [
+                        1.634624919695E9,
+                        "1036530"
+                    ],
+                    [
+                        1.634624924695E9,
+                        "1078477"
+                    ],
+                    [
+                        1.634624929695E9,
+                        "1194775"
+                    ],
+                    [
+                        1.634624934695E9,
+                        "1347755"
+                    ],
+                    [
+                        1.634624939695E9,
+                        "1352151"
+                    ],
+                    [
+                        1.634624944695E9,
+                        "1360428"
+                    ],
+                    [
+                        1.634624949695E9,
+                        "1360428"
+                    ],
+                    [
+                        1.634624954695E9,
+                        "1360428"
+                    ],
+                    [
+                        1.634624959695E9,
+                        "1360428"
+                    ],
+                    [
+                        1.634624964695E9,
+                        "1360428"
+                    ],
+                    [
+                        1.634624969695E9,
+                        "1525685"
+                    ],
+                    [
+                        1.634624974695E9,
+                        "1689296"
+                    ],
+                    [
+                        1.634624979695E9,
+                        "1771358"
+                    ],
+                    [
+                        1.634624984695E9,
+                        "1854284"
+                    ],
+                    [
+                        1.634624989695E9,
+                        "1854284"
+                    ]
+                ]
+            }
+        ]
+    ],
+    "metadata": {
+        "warmup": 60,
+        "queryAggregation": "max",
+        "repetitionAggregation": "median",
+        "operator": "lt",
+        "threshold": 2000000
+    }
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/beam-commons/build.gradle b/theodolite-benchmarks/beam-commons/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..a809f6bc4b97d8d62b807243eddecda8a5de5032
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/build.gradle
@@ -0,0 +1,33 @@
+plugins {
+    id 'theodolite.java-commons'
+}
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+    url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+  // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
+  implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+  implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+  implementation 'com.google.code.gson:gson:2.8.2'
+  implementation 'com.google.guava:guava:24.1-jre'
+
+  implementation('org.apache.beam:beam-sdks-java-io-kafka:2.22.0'){
+    exclude group: 'org.apache.kafka', module: 'kafka-clients'
+  }
+  implementation group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.30'
+  implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
+
+  runtimeOnly 'org.slf4j:slf4j-api:1.7.32'
+  runtimeOnly 'org.slf4j:slf4j-jdk14:1.7.32'
+
+  // Use JUnit test framework
+  testImplementation 'junit:junit:4.12'
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractBeamService.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractBeamService.java
new file mode 100644
index 0000000000000000000000000000000000000000..03c5ca1daa7ffab71a4d08c04f677d7412e3a2be
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractBeamService.java
@@ -0,0 +1,45 @@
+package theodolite.commons.beam;
+
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.commons.configuration2.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import titan.ccp.common.configuration.ServiceConfigurations;
+
+/**
+ * Abstraction of a Beam microservice.
+ * Encapsulates the corresponding {@link PipelineOptions} and the beam Runner.
+ */
+public class AbstractBeamService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractBeamService.class);
+
+  // Beam Pipeline
+  protected PipelineOptions options;
+
+  // Application Configurations
+  private final Configuration config = ServiceConfigurations.createWithDefaults();
+  private final String applicationName =
+      config.getString(ConfigurationKeys.APPLICATION_NAME);
+
+
+  /**
+   * Creates AbstractBeamService with options.
+   */
+  public AbstractBeamService(final String[] args) { //NOPMD
+    super();
+    LOGGER.info("Pipeline options:");
+    for (final String s : args) {
+      LOGGER.info("{}", s);
+    }
+    options = PipelineOptionsFactory.fromArgs(args).create();
+    options.setJobName(applicationName);
+    LOGGER.info("Starting BeamService with PipelineOptions {}:", this.options.toString());
+  }
+
+  public Configuration getConfig() {
+    return config;
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java
new file mode 100644
index 0000000000000000000000000000000000000000..c936ce918c10f3c500cdd26f7e057cd7b6c555b6
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/AbstractPipeline.java
@@ -0,0 +1,50 @@
+package theodolite.commons.beam;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.commons.configuration2.Configuration;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+
+/**
+ * Abstraction of a Beam {@link Pipeline}.
+ */
+public class AbstractPipeline extends Pipeline {
+
+  protected final String inputTopic;
+  protected final String bootstrapServer;
+  // Application Configurations
+  private final Configuration config;
+
+  protected AbstractPipeline(final PipelineOptions options, final Configuration config) {
+    super(options);
+    this.config = config;
+
+    inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
+    bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+  }
+
+  /**
+   * Builds a simple configuration for a Kafka consumer transformation.
+   *
+   * @return the built configuration.
+   */
+  public Map<String, Object> buildConsumerConfig() {
+    final Map<String, Object> consumerConfig = new HashMap<>();
+    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+        config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
+    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        config
+            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+    consumerConfig.put("schema.registry.url",
+        config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
+
+    consumerConfig.put("specific.avro.reader",
+        config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
+
+    final String applicationName = config.getString(ConfigurationKeys.APPLICATION_NAME);
+    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, applicationName);
+    return consumerConfig;
+  }
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/ConfigurationKeys.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..1e4dc593c627282f5c6735a4d91e963d83af6865
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/ConfigurationKeys.java
@@ -0,0 +1,48 @@
+package theodolite.commons.beam;
+
+/**
+ * Keys to access configuration parameters.
+ */
+public final class ConfigurationKeys {
+  // Common keys
+  public static final String APPLICATION_NAME = "application.name";
+
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
+
+  public static final String SCHEMA_REGISTRY_URL = "schema.registry.url";
+
+  public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic";
+
+  // Additional topics
+  public static final String KAFKA_FEEDBACK_TOPIC = "kafka.feedback.topic";
+
+  public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic";
+
+  public static final String KAFKA_CONFIGURATION_TOPIC = "kafka.configuration.topic";
+
+  // UC2
+  public static final String KAFKA_WINDOW_DURATION_MINUTES = "kafka.window.duration.minutes";
+
+  // UC3
+  public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days";
+
+  public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days";
+
+  // UC4
+  public static final String GRACE_PERIOD_MS = "grace.period.ms";
+
+
+  // BEAM
+  public static final String ENABLE_AUTO_COMMIT_CONFIG = "enable.auto.commit.config";
+
+  public static final String AUTO_OFFSET_RESET_CONFIG = "auto.offset.reset.config";
+
+  public static final String SPECIFIC_AVRO_READER = "specific.avro.reader";
+
+  public static final String TRIGGER_INTERVAL  = "trigger.interval";
+
+
+  private ConfigurationKeys() {
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/EventTimePolicy.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/EventTimePolicy.java
new file mode 100644
index 0000000000000000000000000000000000000000..e568968670b3ea51388f3e2a19da8f64bd7c5391
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/EventTimePolicy.java
@@ -0,0 +1,35 @@
+package theodolite.commons.beam.kafka;
+
+import java.util.Optional;
+import org.apache.beam.sdk.io.kafka.KafkaRecord;
+import org.apache.beam.sdk.io.kafka.TimestampPolicy;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.joda.time.Instant;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * TimeStampPolicy to use event time based on the timestamp of the record value.
+ */
+public class EventTimePolicy
+    extends TimestampPolicy<String, ActivePowerRecord> {
+  protected Instant currentWatermark;
+
+  public EventTimePolicy(final Optional<Instant> previousWatermark) {
+    super();
+    this.currentWatermark = previousWatermark.orElse(BoundedWindow.TIMESTAMP_MIN_VALUE);
+  }
+
+
+  @Override
+  public Instant getTimestampForRecord(final PartitionContext ctx,
+      final KafkaRecord<String, ActivePowerRecord> record) {
+    this.currentWatermark = new Instant(record.getKV().getValue().getTimestamp());
+    return this.currentWatermark;
+  }
+
+  @Override
+  public Instant getWatermark(final PartitionContext ctx) {
+    return this.currentWatermark;
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..f102bee41d66c251ecb66418dd3b90dced32cffb
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerRecordReader.java
@@ -0,0 +1,61 @@
+package theodolite.commons.beam.kafka;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Map;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Simple {@link PTransform} that reads from Kafka using {@link KafkaIO}.
+ */
+public class KafkaActivePowerRecordReader extends
+    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
+
+  private static final long serialVersionUID = 2603286150183186115L;
+  private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
+
+
+  /**
+   * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
+   */
+  public KafkaActivePowerRecordReader(final String bootstrapServer, final String inputTopic,
+                                      final Map<String, Object> consumerConfig) {
+    super();
+
+    if (bootstrapServer == null) {
+      throw new IllegalArgumentException("bootstrapServer is null");
+    }
+
+    if (inputTopic == null) {
+      throw new IllegalArgumentException("inputTopic is null");
+    }
+
+    // Check if bootstrap server and inputTopic are defined
+    if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
+      throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
+    }
+
+
+    reader =
+        KafkaIO.<String, ActivePowerRecord>read()
+            .withBootstrapServers(bootstrapServer)
+            .withTopic(inputTopic)
+            .withKeyDeserializer(StringDeserializer.class)
+            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
+                AvroCoder.of(ActivePowerRecord.class))
+            .withConsumerConfigUpdates(consumerConfig)
+            .withoutMetadata();
+  }
+
+  @Override
+  public PCollection<KV<String, ActivePowerRecord>> expand(final PBegin input) {
+    return input.apply(this.reader);
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..732afe9a0c1d4bdfea876025fceea0c5da1310fe
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaActivePowerTimestampReader.java
@@ -0,0 +1,56 @@
+package theodolite.commons.beam.kafka;
+
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Map;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Simple {@link PTransform} that reads from Kafka using {@link KafkaIO}.
+ * Additionally has a TimestampPolicy.
+ */
+public class KafkaActivePowerTimestampReader extends
+    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
+
+  private static final long serialVersionUID = 2603286150183186115L;
+  private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
+
+
+  /**
+   * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
+   */
+  public KafkaActivePowerTimestampReader(final String bootstrapServer, final String inputTopic,
+                                         final Map<String, Object> consumerConfig) {
+    super();
+
+    // Check if bootstrap server and inputTopic are defined
+    if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
+      throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
+    }
+
+    reader =
+        KafkaIO.<String, ActivePowerRecord>read()
+            .withBootstrapServers(bootstrapServer)
+            .withTopic(inputTopic)
+            .withKeyDeserializer(StringDeserializer.class)
+            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
+                AvroCoder.of(ActivePowerRecord.class))
+            .withConsumerConfigUpdates(consumerConfig)
+            // Set TimeStampPolicy for event time
+            .withTimestampPolicyFactory(
+                (tp, previousWaterMark) -> new EventTimePolicy(previousWaterMark))
+            .withoutMetadata();
+  }
+
+  @Override
+  public PCollection<KV<String, ActivePowerRecord>> expand(final PBegin input) {
+    return input.apply(this.reader);
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaGenericReader.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaGenericReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..83336b5a4c2451ef4bffefbd60ad9d52fccd9c17
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaGenericReader.java
@@ -0,0 +1,55 @@
+package theodolite.commons.beam.kafka;
+
+import java.util.Map;
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+
+/**
+ * Simple {@link PTransform} that reads from Kafka using {@link KafkaIO}.
+ *
+ * @param <K> Type of the Key.
+ * @param <V> Type of the Value.
+ */
+public class KafkaGenericReader<K, V> extends
+    PTransform<PBegin, PCollection<KV<K, V>>> {
+
+  private static final long serialVersionUID = 2603286150183186115L;
+  private final PTransform<PBegin, PCollection<KV<K, V>>> reader;
+
+  /**
+   * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
+   */
+  public KafkaGenericReader(final String bootstrapServer, final String inputTopic,
+                            final Map<String, Object> consumerConfig,
+                            final Class<? extends
+                                org.apache.kafka.common.serialization.Deserializer<K>>
+                                  keyDeserializer,
+                            final Class<? extends
+                                org.apache.kafka.common.serialization.Deserializer<V>>
+                                  valueDeserializer) {
+    super();
+
+    // Check if bootstrap server and inputTopic are defined
+    if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
+      throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
+    }
+
+    reader =
+        KafkaIO.<K, V>read()
+            .withBootstrapServers(bootstrapServer)
+            .withTopic(inputTopic)
+            .withKeyDeserializer(keyDeserializer)
+            .withValueDeserializer(valueDeserializer)
+            .withConsumerConfigUpdates(consumerConfig)
+            .withoutMetadata();
+  }
+
+  @Override
+  public PCollection<KV<K, V>> expand(final PBegin input) {
+    return input.apply(this.reader);
+  }
+
+}
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java
new file mode 100644
index 0000000000000000000000000000000000000000..0a3867e71479e36ce30a9f222dfd0a7d473bd209
--- /dev/null
+++ b/theodolite-benchmarks/beam-commons/src/main/java/theodolite/commons/beam/kafka/KafkaWriterTransformation.java
@@ -0,0 +1,45 @@
+package theodolite.commons.beam.kafka;
+
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+
+/**
+ * Wrapper for a Kafka writing Transformation
+ * where the value type can be generic.
+ * @param <T> type of the value.
+ */
+public class KafkaWriterTransformation<T> extends
+    PTransform<PCollection<KV<String, T>>, PDone> {
+
+  private static final long serialVersionUID = 3171423303843174723L;
+  private final PTransform<PCollection<KV<String, T>>, PDone> writer;
+
+  /**
+   * Creates a new kafka writer transformation.
+   */
+  public KafkaWriterTransformation(final String bootstrapServer, final String outputTopic,
+                                   final Class<? extends Serializer<T>> valueSerializer) {
+    super();
+    // Check if bootstrap server and outputTopic are defined
+    if (bootstrapServer.isEmpty() || outputTopic.isEmpty()) {
+      throw new IllegalArgumentException("bootstrapServer or outputTopic missing");
+    }
+
+    this.writer = KafkaIO.<String, T>write()
+        .withBootstrapServers(bootstrapServer)
+        .withTopic(outputTopic)
+        .withKeySerializer(StringSerializer.class)
+        .withValueSerializer(valueSerializer);
+
+  }
+
+  @Override
+  public PDone expand(final PCollection<KV<String, T>> input) {
+    return input.apply(this.writer);
+  }
+}
diff --git a/theodolite-benchmarks/buildSrc/build.gradle b/theodolite-benchmarks/buildSrc/build.gradle
index 4c099de32dc97ed3aa0417e8fff1f06e2a50dfd8..e3c3a8cb496929a5005c28d5d87b00bae85ad2cd 100644
--- a/theodolite-benchmarks/buildSrc/build.gradle
+++ b/theodolite-benchmarks/buildSrc/build.gradle
@@ -1,14 +1,3 @@
-buildscript {
-  repositories {
-    maven {
-      url "https://plugins.gradle.org/m2/"
-    }
-  }
-  dependencies {
-    classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0"
-  }
-}
-
 // to discover the precompiled script plugins
 plugins {
     id 'groovy-gradle-plugin'
@@ -19,6 +8,6 @@ repositories {
 }
 
 dependencies {
-    implementation 'gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:4.6.0'
-    implementation 'com.github.jengelman.gradle.plugins:shadow:6.0.0'
+    implementation 'com.github.spotbugs.snom:spotbugs-gradle-plugin:5.0.4'
+    implementation 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2'
 }
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..eb9bda1f84c4f20568fca1498462dff9082ea1fa
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.flink.gradle
@@ -0,0 +1,8 @@
+plugins {
+    id 'theodolite.beam'
+}
+
+dependencies {
+    implementation group: 'org.apache.beam', name: 'beam-runners-flink-1.12', version: '2.27.0'
+    implementation group: 'org.apache.flink', name: 'flink-statebackend-rocksdb_2.11', version: '1.12.0'
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..41d1ae4f2bdfa358aca3fca2b91ea2b57e4c3405
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.gradle
@@ -0,0 +1,45 @@
+plugins {
+    // common java conventions
+    id 'theodolite.java-conventions'
+
+    // make executable
+    id 'application'
+}
+
+tasks.distZip.enabled = false
+
+repositories {
+    mavenCentral()
+    maven {
+        url "https://oss.sonatype.org/content/repositories/snapshots/"
+    }
+    maven {
+        url 'https://packages.confluent.io/maven/'
+    }
+}
+
+def apacheBeamVersion =  '2.22.0' //'2.27.0' // '2.34.0'
+
+dependencies {
+    // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
+    implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+    implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+    implementation 'com.google.code.gson:gson:2.8.2'
+    implementation 'com.google.guava:guava:24.1-jre'
+    implementation 'org.slf4j:slf4j-simple:1.7.25'
+    implementation project(':beam-commons')
+
+    implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: "${apacheBeamVersion}"
+
+    implementation("org.apache.beam:beam-sdks-java-io-kafka:${apacheBeamVersion}"){
+        exclude group: 'org.apache.kafka', module: 'kafka-clients'
+    }
+    implementation group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.30'
+
+    runtimeOnly "org.apache.beam:beam-runners-direct-java:${apacheBeamVersion}"
+    runtimeOnly 'org.slf4j:slf4j-api:1.7.32'
+    runtimeOnly 'org.slf4j:slf4j-jdk14:1.7.32'
+
+    // Use JUnit test framework
+    testImplementation 'junit:junit:4.12'
+}
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..73e916ccc867b9b3316776192f0dab56fa0710f0
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.beam.samza.gradle
@@ -0,0 +1,9 @@
+plugins {
+    id 'theodolite.beam'
+}
+
+dependencies {
+    implementation('org.apache.beam:beam-runners-samza:2.22.0') {
+        exclude group: 'org.apache.samza', module: 'samza-yarn_2.11'
+    }
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
index 26a827b6049d09e422d48609590614f383f6cae8..f5e93dd88d2234f8a9b0d6fea880f47d652dccfa 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle
@@ -25,7 +25,7 @@ ext {
 }
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle
index 5b0e2a8a1211653428b296b11b14c1531e40e46b..a2912d96262f3d9c9f843ed508370e2798aeb735 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle
@@ -66,5 +66,5 @@ spotbugs {
   reportLevel = "low"
   effort = "max"
   ignoreFailures = false
-  toolVersion = '4.1.4'
+  toolVersion = '4.5.3'
 }
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
index eece7b835ae9d6f39283ea371ce8b0b8194cdaa0..da2d42176ac0ddc9a157f843e3268b37ac4397e2 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle
@@ -9,7 +9,7 @@ plugins {
 tasks.distZip.enabled = false
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle
index c6c2b6057cf35c32faa4d67b6ea6dba9e5c13beb..fb4fd89d1fe8a6d625a3ba7b459e9b0961befdbc 100644
--- a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle
@@ -9,7 +9,7 @@ plugins {
 tasks.distZip.enabled = false
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d8a7b946a9d5e407032ce02838b3ad02892eae73
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-beam-flink/docker-compose.yml
@@ -0,0 +1,74 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
+  benchmark-jobmanager:
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      #ports:
+      #  - "8080:8081"
+      command: >
+        standalone-job --job-classname application.Uc1BeamFlink
+        --disableMetrics=true 
+        --fasterCopy 
+      environment:
+        - KAFKA_BOOTSTRAP_SERVERS=kafka:9092
+        - SCHEMA_REGISTRY_URL=http://schema-registry:8081
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          parallelism.default: 1
+      depends_on:
+        - schema-registry
+        - kafka
+  benchmark-taskmanager:
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      scale: 1
+      command: taskmanager
+      environment:
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+      depends_on:
+        - schema-registry
+        - kafka
diff --git a/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..11cf0c345b417fdda7cedba2f9db1342d2b64634
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-beam-samza/docker-compose.yml
@@ -0,0 +1,59 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    #ports:
+    #  - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc1-beam-samza:latest
+    scale: 1
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      SAMZA_JOB_COORDINATOR_ZK_CONNECT: zookeeper:2181
+      SAMZA_SYSTEMS_KAFKA_PRODUCER_BOOTSTRAP_SERVERS: kafka:9092
+      SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f8bdfae935a55c8cb60e3fb22b19c471832ca9f4
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-beam-flink/docker-compose.yml
@@ -0,0 +1,74 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
+  benchmark-jobmanager:
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      #ports:
+      #  - "8080:8081"
+      command: >
+        standalone-job --job-classname application.Uc2BeamFlink
+        --disableMetrics=true 
+        --fasterCopy 
+      environment:
+        - KAFKA_BOOTSTRAP_SERVERS=kafka:9092
+        - SCHEMA_REGISTRY_URL=http://schema-registry:8081
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          parallelism.default: 1
+      depends_on:
+        - schema-registry
+        - kafka
+  benchmark-taskmanager:
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      scale: 1
+      command: taskmanager
+      environment:
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+      depends_on:
+        - schema-registry
+        - kafka
diff --git a/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..67a5997b66833e33696592285dffe24b03b3d210
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-beam-samza/docker-compose.yml
@@ -0,0 +1,59 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    #ports:
+    #  - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc2-beam-samza:latest
+    scale: 1
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      SAMZA_JOB_COORDINATOR_ZK_CONNECT: zookeeper:2181
+      SAMZA_SYSTEMS_KAFKA_PRODUCER_BOOTSTRAP_SERVERS: kafka:9092
+      SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9a18ab364463a985b40cd691f6232b9b47ae412e
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-beam-flink/docker-compose.yml
@@ -0,0 +1,78 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
+  benchmark-jobmanager:
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      #ports:
+      #  - "8080:8081"
+      command: >
+        standalone-job --job-classname application.Uc3BeamFlink
+        --disableMetrics=true
+        --fasterCopy
+      environment:
+        - KAFKA_BOOTSTRAP_SERVERS=kafka:9092
+        - SCHEMA_REGISTRY_URL=http://schema-registry:8081
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          parallelism.default: 1
+          state.backend: rocksdb
+          state.checkpoints.dir: file:///data/flink/checkpoints
+      depends_on:
+        - schema-registry
+        - kafka
+  benchmark-taskmanager:
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      scale: 1
+      command: taskmanager
+      environment:
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          state.backend: rocksdb
+          state.checkpoints.dir: file:///data/flink/checkpoints
+      depends_on:
+        - schema-registry
+        - kafka
diff --git a/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a50b32bd8f78678d63f06688821d6dfb5f133138
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-beam-samza/docker-compose.yml
@@ -0,0 +1,59 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    #ports:
+    #  - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc3-beam-samza:latest
+    scale: 1
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      SAMZA_JOB_COORDINATOR_ZK_CONNECT: zookeeper:2181
+      SAMZA_SYSTEMS_KAFKA_PRODUCER_BOOTSTRAP_SERVERS: kafka:9092
+      SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..5169ac551952f992d98c74f7d65d5378ecdcc2a5
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-beam-flink/docker-compose.yml
@@ -0,0 +1,80 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 4
+      NUM_NESTED_GROUPS: 4
+  benchmark-jobmanager:
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      #ports:
+      #  - "8080:8081"
+      command: >
+        standalone-job
+        --job-classname application.Uc4BeamFlink
+        --disableMetrics=true
+        --fasterCopy
+      environment:
+        - KAFKA_BOOTSTRAP_SERVERS=kafka:9092
+        - SCHEMA_REGISTRY_URL=http://schema-registry:8081
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          parallelism.default: 1
+          state.backend: rocksdb
+          state.checkpoints.dir: file:///data/flink/checkpoints
+      depends_on:
+        - schema-registry
+        - kafka
+  benchmark-taskmanager:
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      scale: 1
+      command: taskmanager
+      environment:
+        - |
+          FLINK_PROPERTIES=
+          jobmanager.rpc.address: benchmark-jobmanager
+          state.backend: rocksdb
+          state.checkpoints.dir: file:///data/flink/checkpoints
+      depends_on:
+        - schema-registry
+        - kafka
diff --git a/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..bded9d5d227d0f62cb6cb3f9edac3df383ea3e8a
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-beam-samza/docker-compose.yml
@@ -0,0 +1,60 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    #ports:
+    #  - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.3.1
+    depends_on:
+      - zookeeper
+      - kafka
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc4-beam-samza:latest
+    scale: 1
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      SAMZA_JOB_COORDINATOR_ZK_CONNECT: zookeeper:2181
+      SAMZA_SYSTEMS_KAFKA_PRODUCER_BOOTSTRAP_SERVERS: kafka:9092
+      SAMZA_SYSTEMS_KAFKA_CONSUMER_BOOTSTRAP_SERVERS: kafka:9092
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 4
+      NUM_NESTED_GROUPS: 4
diff --git a/theodolite-benchmarks/flink-commons/build.gradle b/theodolite-benchmarks/flink-commons/build.gradle
index edd48c914b8c909ff196bb98e9bbc8b9d99865b9..a3a4a35752006bb10e15ff508ce0b37f70adc57d 100644
--- a/theodolite-benchmarks/flink-commons/build.gradle
+++ b/theodolite-benchmarks/flink-commons/build.gradle
@@ -8,7 +8,7 @@ ext {
 }
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties
index 442d9132ea32808ad980df4bd233b359f76341a7..2e6e5897b5285c749d75662c65ac5d2904c37bc6 100644
--- a/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties
+++ b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/theodolite-benchmarks/kstreams-commons/build.gradle b/theodolite-benchmarks/kstreams-commons/build.gradle
index c5a880acd4377056cc0b0f06b33a2d74c9f87c4e..7683ffe39314ec375eda0ed4e139d618d44a7328 100644
--- a/theodolite-benchmarks/kstreams-commons/build.gradle
+++ b/theodolite-benchmarks/kstreams-commons/build.gradle
@@ -3,7 +3,7 @@ plugins {
 }
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/load-generator-commons/build.gradle b/theodolite-benchmarks/load-generator-commons/build.gradle
index 118f3e648f829a3eafe719ddf660d35ac8563574..f2aa10b079f4be80d19d9ac5d822b7bdab0b6d78 100644
--- a/theodolite-benchmarks/load-generator-commons/build.gradle
+++ b/theodolite-benchmarks/load-generator-commons/build.gradle
@@ -3,7 +3,7 @@ plugins {
 }
 
 repositories {
-  jcenter()
+  mavenCentral()
   maven {
     url "https://oss.sonatype.org/content/repositories/snapshots/"
   }
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
index 73f064d1ce44ff8a613f9ce0a7b9a64d4bac6c38..3f5d14c2e7dccb94e4aacde1f531ec2e9d1fb8db 100644
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java
@@ -22,7 +22,7 @@ public final class LoadGenerator {
   private static final int THREADS_DEFAULT = 4;
   private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
   private static final String KAFKA_TOPIC_DEFAULT = "input";
-  private static final String KAFKA_BOOTSTRAP_SERVERS_DEFAULT = "localhost:19092"; // NOPMD
+  private static final String KAFKA_BOOTSTRAP_SERVERS_DEFAULT = "localhost:9092"; // NOPMD
 
   private ClusterConfig clusterConfig;
   private WorkloadDefinition loadDefinition;
diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle
index 5602e816bb21dce72162b085de99836b8f9aea1e..ae4254e968a0bc09970752f95c6a40db86ae775c 100644
--- a/theodolite-benchmarks/settings.gradle
+++ b/theodolite-benchmarks/settings.gradle
@@ -3,19 +3,34 @@ rootProject.name = 'theodolite-benchmarks'
 include 'load-generator-commons'
 include 'kstreams-commons'
 include 'flink-commons'
+include 'beam-commons'
+
+include 'uc1-beam'
+include 'uc2-beam'
+include 'uc3-beam'
+include 'uc4-beam'
 
 include 'uc1-load-generator'
 include 'uc1-kstreams'
 include 'uc1-flink'
+include 'uc1-beam-flink'
+include 'uc1-beam-samza'
 
 include 'uc2-load-generator'
 include 'uc2-kstreams'
 include 'uc2-flink'
+include 'uc2-beam-flink'
+include 'uc2-beam-samza'
 
 include 'uc3-load-generator'
 include 'uc3-kstreams'
 include 'uc3-flink'
+include 'uc3-beam-flink'
+include 'uc3-beam-samza'
 
 include 'uc4-load-generator'
 include 'uc4-kstreams'
 include 'uc4-flink'
+include 'uc4-beam-flink'
+include 'uc4-beam-samza'
+
diff --git a/theodolite-benchmarks/uc1-beam-flink/Dockerfile b/theodolite-benchmarks/uc1-beam-flink/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0e3c0b3184e4bac8b62b97bc022df88c7701b619
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-flink/Dockerfile
@@ -0,0 +1,5 @@
+FROM flink:1.13-java11
+
+ADD build/distributions/uc1-beam-flink.tar /opt/flink/usrlib/artifacts/uc1-beam-flink.tar
+
+
diff --git a/theodolite-benchmarks/uc1-beam-flink/build.gradle b/theodolite-benchmarks/uc1-beam-flink/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..f4b6cff8efbcdbcb701f249220643669f0f89626
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-flink/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+  id 'theodolite.beam.flink'
+}
+
+dependencies {
+      implementation project(':uc1-beam')
+}
+
+mainClassName = "application.Uc1BeamFlink"
diff --git a/theodolite-benchmarks/uc1-beam-flink/src/main/java/application/Uc1BeamFlink.java b/theodolite-benchmarks/uc1-beam-flink/src/main/java/application/Uc1BeamFlink.java
new file mode 100644
index 0000000000000000000000000000000000000000..fe58369b3c0c19351bcc5cde170df68946af7cbd
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-flink/src/main/java/application/Uc1BeamFlink.java
@@ -0,0 +1,40 @@
+package application;
+
+import org.apache.beam.runners.flink.FlinkRunner;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Database Storage using Apache Beam with the Flink Runner. To
+ * execute locally in standalone start Kafka, Zookeeper, the schema-registry and the workload
+ * generator using the delayed_startup.sh script. Start a Flink cluster and pass its REST address
+ * using --flinkMaster as run parameter. To persist logs add
+ * ${workspace_loc:/uc1-application-samza/eclipseConsoleLogs.log} as Output File under Standard
+ * Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc1BeamFlink extends AbstractBeamService {
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc1BeamFlink(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(FlinkRunner.class);
+  }
+
+  /**
+   * Main method.
+   */
+  public static void main(final String[] args) {
+
+    // Create application via configurations
+    final Uc1BeamFlink uc1 = new Uc1BeamFlink(args);
+
+    // Create pipeline with configurations
+    final Uc1BeamPipeline pipeline = new Uc1BeamPipeline(uc1.options, uc1.getConfig());
+
+    // Submit job and start execution
+    pipeline.run().waitUntilFinish();
+  }
+
+}
+
diff --git a/theodolite-benchmarks/uc1-beam-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-beam-flink/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..50db1510ab5d7f6b8c9b1a75f112719209c351ce
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-flink/src/main/resources/META-INF/application.properties
@@ -0,0 +1,16 @@
+application.name=theodolite-uc1-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+
+schema.registry.url=http://localhost:8081
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc1-beam-samza/.gitignore b/theodolite-benchmarks/uc1-beam-samza/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..7bf05dd280fcc888467656ce1fbdeb65322c7ba8
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/.gitignore
@@ -0,0 +1 @@
+state
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc1-beam-samza/Dockerfile b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9b729060532ea3a242ac3084ba0bebf88ca2e9b6
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/Dockerfile
@@ -0,0 +1,9 @@
+FROM openjdk:11-slim
+
+ENV MAX_SOURCE_PARALLELISM=1024
+
+ADD build/distributions/uc1-beam-samza.tar /
+ADD samza-standalone.properties /
+
+CMD /uc1-beam-samza/bin/uc1-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
+
diff --git a/theodolite-benchmarks/uc1-beam-samza/build.gradle b/theodolite-benchmarks/uc1-beam-samza/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..54c070d967d17ccd41c85f90486655c9fd56b65b
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.samza'
+}
+
+dependencies {
+  implementation project(':uc1-beam')
+}
+
+
+mainClassName = "application.Uc1BeamSamza"
diff --git a/theodolite-benchmarks/uc1-beam-samza/samza-standalone.properties b/theodolite-benchmarks/uc1-beam-samza/samza-standalone.properties
new file mode 100644
index 0000000000000000000000000000000000000000..02411017e86e274f6057688a7c351567603c0f80
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/samza-standalone.properties
@@ -0,0 +1,17 @@
+# Set EnvironmentRewriter
+job.config.rewriters=env-config
+job.config.rewriter.env-config.class=org.apache.samza.config.EnvironmentConfigRewriter
+
+# Configure ZooKeeper for coordination
+job.coordinator.factory=org.apache.samza.zk.ZkJobCoordinatorFactory
+job.coordinator.zk.connect=localhost:2181
+
+# Use GroupByContainerIds
+task.name.grouper.factory=org.apache.samza.container.grouper.task.GroupByContainerIdsFactory
+
+# Configure Kafka as "system"
+job.default.system=kafka
+systems.kafka.samza.factory=org.apache.samza.system.kafka.KafkaSystemFactory
+systems.kafka.consumer.bootstrap.servers=localhost:9092
+systems.kafka.producer.bootstrap.servers=localhost:9092
+systems.kafka.default.stream.replication.factor=1
diff --git a/theodolite-benchmarks/uc1-beam-samza/src/main/java/application/Uc1BeamSamza.java b/theodolite-benchmarks/uc1-beam-samza/src/main/java/application/Uc1BeamSamza.java
new file mode 100644
index 0000000000000000000000000000000000000000..aaef5c2d6968c4b89059537277a2582ecca70451
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/src/main/java/application/Uc1BeamSamza.java
@@ -0,0 +1,43 @@
+package application;
+
+import org.apache.beam.runners.samza.SamzaRunner;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Database Storage using Apache Beam with the Samza Runner. To
+ * execute locally in standalone start Kafka, Zookeeper, the schema-registry and the workload
+ * generator. Add
+ * --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory
+ * --configFilePath=samza-standalone.properties
+ * --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=1024 as program arguments. To
+ * persist logs add ${workspace_loc:/uc1-application-samza/eclipseConsoleLogs.log} as Output File
+ * under Standard Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc1BeamSamza extends AbstractBeamService {
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc1BeamSamza(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(SamzaRunner.class);
+  }
+
+  /**
+   * Main method.
+   */
+  public static void main(final String[] args) {
+
+    // Create application via configurations
+    final Uc1BeamSamza uc1 = new Uc1BeamSamza(args);
+
+    // Create pipeline with configurations
+    final Uc1BeamPipeline pipeline = new Uc1BeamPipeline(uc1.options, uc1.getConfig());
+
+    // Submit job and start execution
+    pipeline.run().waitUntilFinish();
+  }
+}
+
+
+
diff --git a/theodolite-benchmarks/uc1-beam-samza/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-beam-samza/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..50db1510ab5d7f6b8c9b1a75f112719209c351ce
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam-samza/src/main/resources/META-INF/application.properties
@@ -0,0 +1,16 @@
+application.name=theodolite-uc1-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+
+schema.registry.url=http://localhost:8081
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc1-beam/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-beam/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..da2db2cefa90c0d974068e22804132eb6c11d824
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,283 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=false
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=false
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=false
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=false
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=false
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=false
+sp_cleanup.remove_unused_imports=false
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=true
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=true
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=false
+sp_cleanup.useless_return=false
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc1-beam/build.gradle b/theodolite-benchmarks/uc1-beam/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..502e94fa737fb2ae1bab861407b27575cd8766ca
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.beam'
+}
+
+
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java b/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java
new file mode 100644
index 0000000000000000000000000000000000000000..79566fd937b9c100663d426610b6ff476035ef87
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/application/LogKeyValue.java
@@ -0,0 +1,27 @@
+package application;
+
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.values.KV;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Logs all Key Value pairs.
+ */
+@SuppressWarnings({"unused"})
+public class LogKeyValue extends DoFn<KV<String, String>, KV<String, String>> {
+  private static final long serialVersionUID = 4328743;
+  private static final Logger LOGGER = LoggerFactory.getLogger(LogKeyValue.class);
+
+  /**
+   * Logs all key value pairs it processes.
+   */
+  @ProcessElement
+  public void processElement(@Element final KV<String, String> kv,
+      final OutputReceiver<KV<String, String>> out) {
+    if (LOGGER.isInfoEnabled()) {
+      LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());
+    }
+    out.output(kv);
+  }
+}
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/application/MapToGson.java b/theodolite-benchmarks/uc1-beam/src/main/java/application/MapToGson.java
new file mode 100644
index 0000000000000000000000000000000000000000..6b0c6bc4ddfe78c22028da5b8cf7dde7ed57fced
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/application/MapToGson.java
@@ -0,0 +1,26 @@
+package application;
+
+import com.google.gson.Gson;
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Converts a Map into a json String.
+ */
+public class MapToGson extends SimpleFunction<KV<String, ActivePowerRecord>, KV<String, String>> {
+  private static final long serialVersionUID = 7168356203579050214L;
+  private transient Gson gsonObj = new Gson();
+
+  @Override
+  public KV<String, String> apply(
+      final KV<String, ActivePowerRecord> kv) {
+
+    if (this.gsonObj == null) {
+      this.gsonObj = new Gson();
+    }
+
+    final String gson = this.gsonObj.toJson(kv.getValue());
+    return KV.of(kv.getKey(), gson);
+  }
+}
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/application/Uc1BeamPipeline.java b/theodolite-benchmarks/uc1-beam/src/main/java/application/Uc1BeamPipeline.java
new file mode 100644
index 0000000000000000000000000000000000000000..eaff08ac78cd18ddfd47eb2949ca13340ecc27b8
--- /dev/null
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/application/Uc1BeamPipeline.java
@@ -0,0 +1,52 @@
+package application;
+
+import java.util.Map;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.commons.configuration2.Configuration;
+import theodolite.commons.beam.AbstractPipeline;
+import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;
+import titan.ccp.model.records.ActivePowerRecord;
+
+
+/**
+ * Implementation of the use case Database Storage using Apache Beam with the Flink Runner. To
+ * execute locally in standalone start Kafka, Zookeeper, the schema-registry and the workload
+ * generator using the delayed_startup.sh script. Start a Flink cluster and pass its REST address
+ * using --flinkMaster as run parameter. To persist logs add
+ * ${workspace_loc:/uc1-application-samza/eclipseConsoleLogs.log} as Output File under Standard
+ * Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc1BeamPipeline extends AbstractPipeline {
+
+  protected Uc1BeamPipeline(final PipelineOptions options, final Configuration config) {
+    super(options, config);
+
+    // Set Coders for Classes that will be distributed
+    final CoderRegistry cr = this.getCoderRegistry();
+    cr.registerCoderForClass(ActivePowerRecord.class, AvroCoder.of(ActivePowerRecord.SCHEMA$));
+
+    // build KafkaConsumerConfig
+    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
+
+    // Create Pipeline transformations
+    final KafkaActivePowerTimestampReader kafka =
+        new KafkaActivePowerTimestampReader(this.bootstrapServer, this.inputTopic, consumerConfig);
+
+    final LogKeyValue logKeyValue = new LogKeyValue();
+    final MapToGson mapToGson = new MapToGson();
+
+    // Apply pipeline transformations
+    // Read from Kafka
+    this.apply(kafka)
+        // Map to Gson
+        .apply(MapElements
+            .via(mapToGson))
+        // Print to console
+        .apply(ParDo.of(logKeyValue));
+  }
+}
+
diff --git a/theodolite-benchmarks/uc2-beam-flink/Dockerfile b/theodolite-benchmarks/uc2-beam-flink/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..b2ad8ff9036eef0f0cd0efad23d4f92abef7cc42
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-flink/Dockerfile
@@ -0,0 +1,4 @@
+FROM flink:1.13-java11
+
+ADD build/distributions/uc2-beam-flink.tar /opt/flink/usrlib/artifacts/uc2-beam-flink.tar
+
diff --git a/theodolite-benchmarks/uc2-beam-flink/build.gradle b/theodolite-benchmarks/uc2-beam-flink/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..9ab898cd465abe20e855d06ebf85373e46ab12e2
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-flink/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.flink'
+}
+
+
+dependencies {
+  implementation project(':uc2-beam')
+}
+
+mainClassName = "application.Uc2BeamFlink"
diff --git a/theodolite-benchmarks/uc2-beam-flink/src/main/java/application/Uc2BeamFlink.java b/theodolite-benchmarks/uc2-beam-flink/src/main/java/application/Uc2BeamFlink.java
new file mode 100644
index 0000000000000000000000000000000000000000..f5bb849e626444929e00b17b1324a08c41cb19a0
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-flink/src/main/java/application/Uc2BeamFlink.java
@@ -0,0 +1,35 @@
+package application;
+
+import org.apache.beam.runners.flink.FlinkRunner;
+import org.apache.beam.sdk.Pipeline;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Downsampling using Apache Beam with the Flink Runner. To execute
+ * locally in standalone start Kafka, Zookeeper, the schema-registry and the workload generator
+ * using the delayed_startup.sh script. Start a Flink cluster and pass its REST address
+ * using --flinkMaster as run parameter.
+ */
+public final class Uc2BeamFlink extends AbstractBeamService {
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc2BeamFlink(final String[] args) { // NOPMD
+    super(args);
+    this.options.setRunner(FlinkRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc2BeamFlink uc2BeamFlink = new Uc2BeamFlink(args);
+
+    final Pipeline pipeline = new Uc2BeamPipeline(uc2BeamFlink.options, uc2BeamFlink.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+}
+
diff --git a/theodolite-benchmarks/uc2-beam-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-beam-flink/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..1545a0f6630c8ea51d694f4056ca3aa750463f5b
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-flink/src/main/resources/META-INF/application.properties
@@ -0,0 +1,17 @@
+application.name=theodolite-uc2-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-beam-samza/.gitignore b/theodolite-benchmarks/uc2-beam-samza/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..7bf05dd280fcc888467656ce1fbdeb65322c7ba8
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/.gitignore
@@ -0,0 +1 @@
+state
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-beam-samza/Dockerfile b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..22855cea279819cacbf6eee253c30c60409fdba3
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/Dockerfile
@@ -0,0 +1,8 @@
+FROM openjdk:11-slim
+
+ENV MAX_SOURCE_PARALLELISM=1024
+
+ADD build/distributions/uc2-beam-samza.tar /
+ADD samza-standalone.properties /
+
+CMD /uc2-beam-samza/bin/uc2-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc2-beam-samza/build.gradle b/theodolite-benchmarks/uc2-beam-samza/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..29d7f9ac0c22c421072646ca665b3849c558d56f
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.samza'
+}
+
+
+dependencies {
+  implementation project(':uc2-beam')
+}
+
+mainClassName = "application.Uc2BeamSamza"
diff --git a/theodolite-benchmarks/uc2-beam-samza/samza-standalone.properties b/theodolite-benchmarks/uc2-beam-samza/samza-standalone.properties
new file mode 100644
index 0000000000000000000000000000000000000000..02411017e86e274f6057688a7c351567603c0f80
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/samza-standalone.properties
@@ -0,0 +1,17 @@
+# Set EnvironmentRewriter
+job.config.rewriters=env-config
+job.config.rewriter.env-config.class=org.apache.samza.config.EnvironmentConfigRewriter
+
+# Configure ZooKeeper for coordination
+job.coordinator.factory=org.apache.samza.zk.ZkJobCoordinatorFactory
+job.coordinator.zk.connect=localhost:2181
+
+# Use GroupByContainerIds
+task.name.grouper.factory=org.apache.samza.container.grouper.task.GroupByContainerIdsFactory
+
+# Configure Kafka as "system"
+job.default.system=kafka
+systems.kafka.samza.factory=org.apache.samza.system.kafka.KafkaSystemFactory
+systems.kafka.consumer.bootstrap.servers=localhost:9092
+systems.kafka.producer.bootstrap.servers=localhost:9092
+systems.kafka.default.stream.replication.factor=1
diff --git a/theodolite-benchmarks/uc2-beam-samza/src/main/java/application/Uc2BeamSamza.java b/theodolite-benchmarks/uc2-beam-samza/src/main/java/application/Uc2BeamSamza.java
new file mode 100644
index 0000000000000000000000000000000000000000..d4b3d6d910824a718bffe8dc5f0204d53b9865c1
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/src/main/java/application/Uc2BeamSamza.java
@@ -0,0 +1,39 @@
+package application;
+
+import org.apache.beam.runners.samza.SamzaRunner;
+import org.apache.beam.sdk.Pipeline;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Downsampling using Apache Beam with the Samza Runner. To run
+ * locally in standalone start Kafka, Zookeeper, the schema-registry and the workload generator
+ * using the delayed_startup.sh script. Add
+ * --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory
+ * --configFilePath=${workspace_loc:uc3-application-samza}/config/standalone_local.properties
+ * --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=1024 --as program arguments. To
+ * persist logs add ${workspace_loc:/uc3-application-samza/eclipseConsoleLogs.log} as Output File
+ * under Standard Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc2BeamSamza extends AbstractBeamService {
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc2BeamSamza(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(SamzaRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc2BeamSamza uc2BeamSamza = new Uc2BeamSamza(args);
+
+    final Pipeline pipeline = new Uc2BeamPipeline(uc2BeamSamza.options, uc2BeamSamza.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+}
+
diff --git a/theodolite-benchmarks/uc2-beam-samza/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-beam-samza/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..1545a0f6630c8ea51d694f4056ca3aa750463f5b
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam-samza/src/main/resources/META-INF/application.properties
@@ -0,0 +1,17 @@
+application.name=theodolite-uc2-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-beam/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-beam/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..43ccd3dbcdf80e49b8920c8fe242b35c3f604281
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,283 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=false
+sp_cleanup.always_use_this_for_non_static_method_access=false
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=false
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=true
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=false
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=true
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=false
+sp_cleanup.useless_return=false
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc2-beam/build.gradle b/theodolite-benchmarks/uc2-beam/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..502e94fa737fb2ae1bab861407b27575cd8766ca
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.beam'
+}
+
+
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsAggregation.java b/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsAggregation.java
new file mode 100644
index 0000000000000000000000000000000000000000..688f6677ec6d74e063a07a20c079b783aa71c399
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsAggregation.java
@@ -0,0 +1,45 @@
+package application;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import java.io.Serializable;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.DefaultCoder;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Aggregation Class for ActivePowerRecords. Creates a StatsAccumulator based on the ValueInW.
+ */
+
+@DefaultCoder(AvroCoder.class)
+public class StatsAggregation extends CombineFn<ActivePowerRecord, StatsAccumulator, Stats>
+    implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public StatsAccumulator createAccumulator() {
+    return new StatsAccumulator();
+  }
+
+  @Override
+  public StatsAccumulator addInput(final StatsAccumulator accum, final ActivePowerRecord input) {
+    accum.add(input.getValueInW());
+    return accum;
+  }
+
+  @Override
+  public StatsAccumulator mergeAccumulators(final Iterable<StatsAccumulator> accums) {
+    final StatsAccumulator merged = createAccumulator();
+    for (final StatsAccumulator accum : accums) {
+      merged.addAll(accum.snapshot());
+    }
+    return merged;
+  }
+
+  @Override
+  public Stats extractOutput(final StatsAccumulator accum) {
+    return accum.snapshot();
+  }
+}
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsToString.java b/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsToString.java
new file mode 100644
index 0000000000000000000000000000000000000000..9a73ae8e1681b2c350dee328cba7283cf3386fd7
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/application/StatsToString.java
@@ -0,0 +1,18 @@
+package application;
+
+import com.google.common.math.Stats;
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+
+/**
+ * Transforms a {@code KV<String, Stats>} into a {@code KV<String, String>}.
+ */
+public class StatsToString extends SimpleFunction<KV<String, Stats>, KV<String, String>> {
+  private static final long serialVersionUID = 4308991244493097240L;
+
+  @Override
+  public KV<String, String> apply(final KV<String, Stats> kv) {
+    return KV.of(kv.getKey(), kv.getValue().toString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/application/Uc2BeamPipeline.java b/theodolite-benchmarks/uc2-beam/src/main/java/application/Uc2BeamPipeline.java
new file mode 100644
index 0000000000000000000000000000000000000000..02eec9868b0bbfbf6fd45206ff0d4092ac09e1ac
--- /dev/null
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/application/Uc2BeamPipeline.java
@@ -0,0 +1,73 @@
+package application;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import java.util.Map;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.SerializableCoder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.windowing.FixedWindows;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.apache.beam.sdk.values.KV;
+import org.apache.commons.configuration2.Configuration;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.joda.time.Duration;
+import theodolite.commons.beam.AbstractPipeline;
+import theodolite.commons.beam.ConfigurationKeys;
+import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;
+import theodolite.commons.beam.kafka.KafkaWriterTransformation;
+import titan.ccp.model.records.ActivePowerRecord;
+
+
+/**
+ * Implementation of the use case Downsampling using Apache Beam.
+ */
+public final class Uc2BeamPipeline extends AbstractPipeline {
+
+  protected Uc2BeamPipeline(final PipelineOptions options, final Configuration config) {
+    super(options, config);
+    // Additional needed variables
+    final String outputTopic = config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+
+    final Duration duration =
+        Duration.standardMinutes(config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES));
+
+    // Build kafka configuration
+    final Map<String, Object> consumerConfig = buildConsumerConfig();
+
+    // Set Coders for Classes that will be distributed
+    final CoderRegistry cr = getCoderRegistry();
+    cr.registerCoderForClass(ActivePowerRecord.class, AvroCoder.of(ActivePowerRecord.SCHEMA$));
+    cr.registerCoderForClass(StatsAggregation.class, SerializableCoder.of(StatsAggregation.class));
+    cr.registerCoderForClass(StatsAccumulator.class, AvroCoder.of(StatsAccumulator.class));
+
+    // Read from Kafka
+    final KafkaActivePowerTimestampReader kafkaActivePowerRecordReader =
+        new KafkaActivePowerTimestampReader(bootstrapServer, inputTopic, consumerConfig);
+
+    // Transform into String
+    final StatsToString statsToString = new StatsToString();
+
+    // Write to Kafka
+    final KafkaWriterTransformation<String> kafkaWriter =
+        new KafkaWriterTransformation<>(bootstrapServer, outputTopic, StringSerializer.class);
+
+    // Apply pipeline transformations
+    this.apply(kafkaActivePowerRecordReader)
+        // Apply a fixed window
+        .apply(Window.<KV<String, ActivePowerRecord>>into(FixedWindows.of(duration)))
+        // Aggregate per window for every key
+        .apply(Combine.<String, ActivePowerRecord, Stats>perKey(new StatsAggregation()))
+        .setCoder(KvCoder.of(StringUtf8Coder.of(), SerializableCoder.of(Stats.class)))
+        // Map into correct output format
+        .apply(MapElements.via(statsToString))
+        // Write to Kafka
+        .apply(kafkaWriter);
+  }
+}
+
diff --git a/theodolite-benchmarks/uc3-beam-flink/Dockerfile b/theodolite-benchmarks/uc3-beam-flink/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..3c0b4d96b8a59e45da3b85e8d7cd238b98cacfd7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-flink/Dockerfile
@@ -0,0 +1,4 @@
+FROM flink:1.13-java11
+
+ADD build/distributions/uc3-beam-flink.tar /opt/flink/usrlib/artifacts/uc3-beam-flink.tar
+
diff --git a/theodolite-benchmarks/uc3-beam-flink/build.gradle b/theodolite-benchmarks/uc3-beam-flink/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..8f047c6dce50636f01a7cdf645722aa5f7ac9ce9
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-flink/build.gradle
@@ -0,0 +1,12 @@
+plugins {
+  id 'theodolite.beam.flink'
+}
+
+
+dependencies {
+  implementation project(':uc3-beam')
+}
+
+
+// This is the path of the main class, stored within ./src/main/java/
+mainClassName = 'application.Uc3BeamFlink'
diff --git a/theodolite-benchmarks/uc3-beam-flink/src/main/java/application/Uc3BeamFlink.java b/theodolite-benchmarks/uc3-beam-flink/src/main/java/application/Uc3BeamFlink.java
new file mode 100644
index 0000000000000000000000000000000000000000..18532b2655fcc6c24dad5f2fca87607c0b5d2e54
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-flink/src/main/java/application/Uc3BeamFlink.java
@@ -0,0 +1,39 @@
+package application;
+
+import org.apache.beam.runners.flink.FlinkRunner;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Aggregation based on Time Attributes using Apache Beam with the
+ * Flink Runner. To run locally in standalone start Kafka, Zookeeper, the schema-registry and the
+ * workload generator using the delayed_startup.sh script. And configure the Kafka, Zookeeper and
+ * Schema Registry urls accordingly. Start a Flink cluster and pass its REST address
+ * using --flinkMaster as run parameter. To persist logs add
+ * ${workspace_loc:/uc3-beam-flink/eclipseConsoleLogs.log} as Output File under Standard
+ * Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc3BeamFlink extends AbstractBeamService {
+
+  /**
+   * Private constructor to avoid instantiation.
+   */
+  private Uc3BeamFlink(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(FlinkRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc3BeamFlink uc3BeamFlink = new Uc3BeamFlink(args);
+
+    final Uc3BeamPipeline pipeline =
+        new Uc3BeamPipeline(uc3BeamFlink.options, uc3BeamFlink.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+
+}
+
diff --git a/theodolite-benchmarks/uc3-beam-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-beam-flink/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..2db723927eaee10d39e02a6b2d369a06af7711fc
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-flink/src/main/resources/META-INF/application.properties
@@ -0,0 +1,22 @@
+application.name=theodolite-uc3-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+aggregation.duration.days=30
+aggregation.advance.days=1
+
+trigger.interval=15
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-beam-samza/.gitignore b/theodolite-benchmarks/uc3-beam-samza/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..7bf05dd280fcc888467656ce1fbdeb65322c7ba8
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/.gitignore
@@ -0,0 +1 @@
+state
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-beam-samza/Dockerfile b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..d3e860bd72c54121d616bc5562d519e6e1e21dec
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/Dockerfile
@@ -0,0 +1,8 @@
+FROM openjdk:11-slim
+
+ENV MAX_SOURCE_PARALLELISM=1024
+
+ADD build/distributions/uc3-beam-samza.tar /
+ADD samza-standalone.properties /
+
+CMD /uc3-beam-samza/bin/uc3-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc3-beam-samza/build.gradle b/theodolite-benchmarks/uc3-beam-samza/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..513b850330f4f71f440ad5da4ecea95f092f5ccc
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.samza'
+}
+
+
+dependencies {
+  implementation project(':uc3-beam')
+}
+
+mainClassName = "application.Uc3BeamSamza"
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-beam-samza/samza-standalone.properties b/theodolite-benchmarks/uc3-beam-samza/samza-standalone.properties
new file mode 100644
index 0000000000000000000000000000000000000000..02411017e86e274f6057688a7c351567603c0f80
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/samza-standalone.properties
@@ -0,0 +1,17 @@
+# Set EnvironmentRewriter
+job.config.rewriters=env-config
+job.config.rewriter.env-config.class=org.apache.samza.config.EnvironmentConfigRewriter
+
+# Configure ZooKeeper for coordination
+job.coordinator.factory=org.apache.samza.zk.ZkJobCoordinatorFactory
+job.coordinator.zk.connect=localhost:2181
+
+# Use GroupByContainerIds
+task.name.grouper.factory=org.apache.samza.container.grouper.task.GroupByContainerIdsFactory
+
+# Configure Kafka as "system"
+job.default.system=kafka
+systems.kafka.samza.factory=org.apache.samza.system.kafka.KafkaSystemFactory
+systems.kafka.consumer.bootstrap.servers=localhost:9092
+systems.kafka.producer.bootstrap.servers=localhost:9092
+systems.kafka.default.stream.replication.factor=1
diff --git a/theodolite-benchmarks/uc3-beam-samza/src/main/java/application/Uc3BeamSamza.java b/theodolite-benchmarks/uc3-beam-samza/src/main/java/application/Uc3BeamSamza.java
new file mode 100644
index 0000000000000000000000000000000000000000..913293bd02cb16e14ee9d94ea0e161c74853e72a
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/src/main/java/application/Uc3BeamSamza.java
@@ -0,0 +1,39 @@
+package application;
+
+import org.apache.beam.runners.samza.SamzaRunner;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Aggregation based on Time Attributes using Apache Beam with the
+ * Samza Runner. To run locally in standalone start Kafka, Zookeeper, the schema-registry and the
+ * workload generator using the delayed_startup.sh script. And configure the Kafka, Zookeeper and
+ * Schema Registry urls accordingly. To run in a Samza cluster, pass the corresponding
+ * Samza runner options as run parameters. To persist logs add
+ * ${workspace_loc:/uc3-beam-samza/eclipseConsoleLogs.log} as Output File under Standard
+ * Input Output in Common in the Run Configuration Start via Eclipse Run.
+ */
+public final class Uc3BeamSamza extends AbstractBeamService {
+
+  /**
+   * Private constructor to avoid instantiation.
+   */
+  private Uc3BeamSamza(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(SamzaRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc3BeamSamza uc3BeamSamza = new Uc3BeamSamza(args);
+
+    final Uc3BeamPipeline pipeline =
+        new Uc3BeamPipeline(uc3BeamSamza.options, uc3BeamSamza.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+
+}
+
diff --git a/theodolite-benchmarks/uc3-beam-samza/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-beam-samza/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..2db723927eaee10d39e02a6b2d369a06af7711fc
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam-samza/src/main/resources/META-INF/application.properties
@@ -0,0 +1,22 @@
+application.name=theodolite-uc3-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+aggregation.duration.days=30
+aggregation.advance.days=1
+
+trigger.interval=15
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-beam/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-beam/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..d71754b65d8da3cee7e6e440f49aa833ddabae10
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,283 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=false
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=false
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=false
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=false
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=true
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=true
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=false
+sp_cleanup.useless_return=false
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc3-beam/build.gradle b/theodolite-benchmarks/uc3-beam/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..502e94fa737fb2ae1bab861407b27575cd8766ca
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.beam'
+}
+
+
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKey.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..6db59dd65ee494157400b0f4c6eafbdd7655d402
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKey.java
@@ -0,0 +1,34 @@
+package application;
+
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.DefaultCoder;
+
+
+/**
+ * Composed key of an hour of the day and a sensor id.
+ */
+@DefaultCoder(AvroCoder.class)
+public class HourOfDayKey {
+
+  private final int hourOfDay;
+  private final String sensorId;
+
+  public HourOfDayKey(final int hourOfDay, final String sensorId) {
+    this.hourOfDay = hourOfDay;
+    this.sensorId = sensorId;
+  }
+
+  public int getHourOfDay() {
+    return this.hourOfDay;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  @Override
+  public String toString() {
+    return this.sensorId + ";" + this.hourOfDay;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeyFactory.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..b993a0199bc13e9b416f9b9cb77a27635d7fe1e1
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeyFactory.java
@@ -0,0 +1,24 @@
+package application;
+
+import java.io.Serializable;
+import java.time.LocalDateTime;
+
+/**
+ * {@link StatsKeyFactory} for {@link HourOfDayKey}.
+ */
+public class HourOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey>, Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) {
+    final int hourOfDay = dateTime.getHour();
+    return new HourOfDayKey(hourOfDay, sensorId);
+  }
+
+  @Override
+  public String getSensorId(final HourOfDayKey key) {
+    return key.getSensorId();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeySerde.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeySerde.java
new file mode 100644
index 0000000000000000000000000000000000000000..a0f8e0bbaf959154bfbab69d83da56c81d55802a
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayKeySerde.java
@@ -0,0 +1,32 @@
+package application;
+
+import org.apache.kafka.common.serialization.Serde;
+import titan.ccp.common.kafka.simpleserdes.BufferSerde;
+import titan.ccp.common.kafka.simpleserdes.ReadBuffer;
+import titan.ccp.common.kafka.simpleserdes.SimpleSerdes;
+import titan.ccp.common.kafka.simpleserdes.WriteBuffer;
+
+/**
+ * {@link BufferSerde} for a {@link HourOfDayKey}. Use the {@link #create()} method to create a new
+ * Kafka {@link Serde}.
+ */
+public class HourOfDayKeySerde implements BufferSerde<HourOfDayKey> {
+
+  @Override
+  public void serialize(final WriteBuffer buffer, final HourOfDayKey data) {
+    buffer.putInt(data.getHourOfDay());
+    buffer.putString(data.getSensorId());
+  }
+
+  @Override
+  public HourOfDayKey deserialize(final ReadBuffer buffer) {
+    final int hourOfDay = buffer.getInt();
+    final String sensorId = buffer.getString();
+    return new HourOfDayKey(hourOfDay, sensorId);
+  }
+
+  public static Serde<HourOfDayKey> create() {
+    return SimpleSerdes.create(new HourOfDayKeySerde());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayWithStats.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayWithStats.java
new file mode 100644
index 0000000000000000000000000000000000000000..46232b3f13601d77f6cb7b13ea0bcdc31290357a
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDayWithStats.java
@@ -0,0 +1,19 @@
+package application;
+
+import com.google.common.math.Stats;
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+
+/**
+ * {@link SimpleFunction} that maps a keyed stats entry to its sensorId and the stats rendered as a String.
+ */
+public class HourOfDayWithStats extends
+    SimpleFunction<KV<HourOfDayKey, Stats>, KV<String, String>> {
+  private static final long serialVersionUID = -7411154345437422919L;
+  private final HourOfDayKeyFactory keyFactory = new HourOfDayKeyFactory();
+
+  @Override
+  public KV<String, String> apply(final KV<HourOfDayKey, Stats> kv) {
+    return KV.of(keyFactory.getSensorId(kv.getKey()), kv.getValue().toString());
+  }
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDaykeyCoder.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDaykeyCoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..196408263ec29a1ec9b45375dd0b53a18e8f60b3
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/HourOfDaykeyCoder.java
@@ -0,0 +1,60 @@
+package application;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderException;
+import org.apache.kafka.common.serialization.Serde;
+
+/**
+ * Wrapper class that encapsulates a HourOfDayKeySerde in an org.apache.beam.sdk.coders.Coder.
+ */
+public class HourOfDaykeyCoder extends Coder<HourOfDayKey> implements Serializable {
+  public static final long serialVersionUID = 4444444;
+  private static final boolean DETERMINISTIC = true;
+  private static final int VALUE_SIZE = 4;
+
+  private transient Serde<HourOfDayKey> innerSerde = HourOfDayKeySerde.create();
+
+  @Override
+  public void encode(final HourOfDayKey value, final OutputStream outStream)
+      throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = HourOfDayKeySerde.create();
+    }
+    final byte[] bytes = this.innerSerde.serializer().serialize("ser", value);
+    final byte[] sizeinBytes = ByteBuffer.allocate(VALUE_SIZE).putInt(bytes.length).array();
+    outStream.write(sizeinBytes);
+    outStream.write(bytes);
+  }
+
+  @Override
+  public HourOfDayKey decode(final InputStream inStream) throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = HourOfDayKeySerde.create();
+    }
+    final byte[] sizeinBytes = new byte[VALUE_SIZE];
+    inStream.read(sizeinBytes);
+    final int size = ByteBuffer.wrap(sizeinBytes).getInt();
+    final byte[] bytes = new byte[size];
+    inStream.read(bytes);
+    return this.innerSerde.deserializer().deserialize("deser", bytes);
+  }
+
+  @Override
+  public List<? extends Coder<?>> getCoderArguments() {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void verifyDeterministic() throws NonDeterministicException {
+    if (!DETERMINISTIC) {
+      throw new NonDeterministicException(this, "This class is not deterministic!");
+    }
+  }
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/MapTimeFormat.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/MapTimeFormat.java
new file mode 100644
index 0000000000000000000000000000000000000000..7d8897fb0fd76cd4eb145da6a7ce031f9f45d396
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/MapTimeFormat.java
@@ -0,0 +1,27 @@
+package application;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Changes the time format to use Europe/Paris time.
+ */
+public class MapTimeFormat
+    extends SimpleFunction<KV<String, ActivePowerRecord>, KV<HourOfDayKey, ActivePowerRecord>> {
+  private static final long serialVersionUID = -6597391279968647035L;
+  private final StatsKeyFactory<HourOfDayKey> keyFactory = new HourOfDayKeyFactory();
+  private final ZoneId zone = ZoneId.of("Europe/Paris");
+
+  @Override
+  public KV<HourOfDayKey, ActivePowerRecord> apply(
+      final KV<String, ActivePowerRecord> kv) {
+    final Instant instant = Instant.ofEpochMilli(kv.getValue().getTimestamp());
+    final LocalDateTime dateTime = LocalDateTime.ofInstant(instant, this.zone);
+    return KV.of(this.keyFactory.createKey(kv.getValue().getIdentifier(), dateTime),
+        kv.getValue());
+  }
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsAggregation.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsAggregation.java
new file mode 100644
index 0000000000000000000000000000000000000000..ee5cfc48bcd42dec41dd2030ad3f4a730fd6ac85
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsAggregation.java
@@ -0,0 +1,45 @@
+package application;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import java.io.Serializable;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.DefaultCoder;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import titan.ccp.model.records.ActivePowerRecord;
+
+
+/**
+ * Aggregation Class for ActivePowerRecords. Creates a StatsAccumulator based on the ValueInW.
+ */
+
+@DefaultCoder(AvroCoder.class)
+public class StatsAggregation extends CombineFn<ActivePowerRecord, StatsAccumulator, Stats>
+    implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public StatsAccumulator createAccumulator() {
+    return new StatsAccumulator();
+  }
+
+  @Override
+  public StatsAccumulator addInput(final StatsAccumulator accum, final ActivePowerRecord input) {
+    accum.add(input.getValueInW());
+    return accum;
+  }
+
+  @Override
+  public StatsAccumulator mergeAccumulators(final Iterable<StatsAccumulator> accums) {
+    final StatsAccumulator merged = this.createAccumulator();
+    for (final StatsAccumulator accum : accums) {
+      merged.addAll(accum.snapshot());
+    }
+    return merged;
+  }
+
+  @Override
+  public Stats extractOutput(final StatsAccumulator accum) {
+    return accum.snapshot();
+  }
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsKeyFactory.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..820168058f88ca21f8efcf61c7ebed60c08aa200
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/StatsKeyFactory.java
@@ -0,0 +1,17 @@
+package application;
+
+import java.time.LocalDateTime;
+
+/**
+ * Factory interface for creating a stats key from a sensor id and a {@link LocalDateTime} object
+ * and vice versa.
+ *
+ * @param <T> Type of the key
+ */
+public interface StatsKeyFactory<T> {
+
+  T createKey(String sensorId, LocalDateTime dateTime);
+
+  String getSensorId(T key);
+
+}
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/application/Uc3BeamPipeline.java b/theodolite-benchmarks/uc3-beam/src/main/java/application/Uc3BeamPipeline.java
new file mode 100644
index 0000000000000000000000000000000000000000..c402271777dd63026e1f1fb36855dad1a72e1136
--- /dev/null
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/application/Uc3BeamPipeline.java
@@ -0,0 +1,102 @@
+package application;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import java.util.Map;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.SerializableCoder;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
+import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
+import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.apache.beam.sdk.values.KV;
+import org.apache.commons.configuration2.Configuration;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.joda.time.Duration;
+import theodolite.commons.beam.AbstractPipeline;
+import theodolite.commons.beam.ConfigurationKeys;
+import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;
+import theodolite.commons.beam.kafka.KafkaWriterTransformation;
+import titan.ccp.model.records.ActivePowerRecord;
+
+
+/**
+ * Implementation of the use case Aggregation based on Time Attributes using Apache Beam.
+ */
+public final class Uc3BeamPipeline extends AbstractPipeline {
+
+  protected Uc3BeamPipeline(final PipelineOptions options, final Configuration config) {
+    super(options, config);
+    // Additional needed variables
+    final String outputTopic = config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+
+    final Duration duration =
+        Duration.standardDays(config.getInt(ConfigurationKeys.AGGREGATION_DURATION_DAYS));
+    final Duration aggregationAdvanceDuration =
+        Duration.standardDays(config.getInt(ConfigurationKeys.AGGREGATION_ADVANCE_DAYS));
+    final Duration triggerDelay =
+        Duration.standardSeconds(config.getInt(ConfigurationKeys.TRIGGER_INTERVAL));
+
+    // Build Kafka configuration
+    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
+
+    // Set Coders for classes that will be distributed
+    final CoderRegistry cr = this.getCoderRegistry();
+    registerCoders(cr);
+
+    // Read from Kafka
+    final KafkaActivePowerTimestampReader kafka =
+        new KafkaActivePowerTimestampReader(this.bootstrapServer, this.inputTopic, consumerConfig);
+
+    // Map the time format
+    final MapTimeFormat mapTimeFormat = new MapTimeFormat();
+
+    // Get the stats per HourOfDay
+    final HourOfDayWithStats hourOfDayWithStats = new HourOfDayWithStats();
+
+    // Write to Kafka
+    final KafkaWriterTransformation<String> kafkaWriter =
+        new KafkaWriterTransformation<>(this.bootstrapServer, outputTopic, StringSerializer.class);
+
+    this.apply(kafka)
+        // Map to correct time format
+        .apply(MapElements.via(mapTimeFormat))
+        // Apply a sliding window
+        .apply(Window
+            .<KV<HourOfDayKey, ActivePowerRecord>>into(
+                SlidingWindows.of(duration).every(aggregationAdvanceDuration))
+            .triggering(AfterWatermark.pastEndOfWindow()
+                .withEarlyFirings(
+                    AfterProcessingTime.pastFirstElementInPane().plusDelayOf(triggerDelay)))
+            .withAllowedLateness(Duration.ZERO)
+            .accumulatingFiredPanes())
+
+        // Aggregate per window for every key
+        .apply(Combine.<HourOfDayKey, ActivePowerRecord, Stats>perKey(new StatsAggregation()))
+        .setCoder(KvCoder.of(new HourOfDaykeyCoder(), SerializableCoder.of(Stats.class)))
+
+        // Map into correct output format
+        .apply(MapElements.via(hourOfDayWithStats))
+        // Write to Kafka
+        .apply(kafkaWriter);
+  }
+
+
+  /**
+   * Registers coders for all classes that will be distributed between workers.
+   *
+   * @param cr CoderRegistry.
+   */
+  private static void registerCoders(final CoderRegistry cr) {
+    cr.registerCoderForClass(ActivePowerRecord.class, AvroCoder.of(ActivePowerRecord.SCHEMA$));
+    cr.registerCoderForClass(HourOfDayKey.class, new HourOfDaykeyCoder());
+    cr.registerCoderForClass(StatsAggregation.class, SerializableCoder.of(StatsAggregation.class));
+    cr.registerCoderForClass(StatsAccumulator.class, AvroCoder.of(StatsAccumulator.class));
+  }
+}
+
diff --git a/theodolite-benchmarks/uc4-beam-flink/Dockerfile b/theodolite-benchmarks/uc4-beam-flink/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9c238ddbaccbe9040571e18ac8ad8eef5b7ecf15
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-flink/Dockerfile
@@ -0,0 +1,4 @@
+FROM flink:1.13-java11
+
+ADD build/distributions/uc4-beam-flink.tar /opt/flink/usrlib/artifacts/uc4-beam-flink.tar
+
diff --git a/theodolite-benchmarks/uc4-beam-flink/build.gradle b/theodolite-benchmarks/uc4-beam-flink/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..7ace89a2275e29e22186f7b67dcb7816cc7a85d0
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-flink/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.flink'
+}
+
+
+dependencies {
+  implementation project(':uc4-beam')
+}
+
+mainClassName = 'application.Uc4BeamFlink'
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-beam-flink/src/main/java/application/Uc4BeamFlink.java b/theodolite-benchmarks/uc4-beam-flink/src/main/java/application/Uc4BeamFlink.java
new file mode 100644
index 0000000000000000000000000000000000000000..90f9a4a292e99526fa94c7dd512bdcec548fbb4f
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-flink/src/main/java/application/Uc4BeamFlink.java
@@ -0,0 +1,34 @@
+package application;
+
+import org.apache.beam.runners.flink.FlinkRunner;
+import org.apache.beam.sdk.Pipeline;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Hierarchical Aggregation using Apache Beam with the Flink
+ * Runner.
+ **/
+public final class Uc4BeamFlink extends AbstractBeamService {
+
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc4BeamFlink(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(FlinkRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc4BeamFlink uc4BeamFlink = new Uc4BeamFlink(args);
+
+    final Pipeline pipeline = new Uc4BeamPipeline(uc4BeamFlink.options, uc4BeamFlink.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-beam-flink/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..bc679580dadf969e181b6787e8287066426be7e2
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-flink/src/main/resources/META-INF/application.properties
@@ -0,0 +1,25 @@
+application.name=theodolite-uc4-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.configuration.topic=configuration
+kafka.feedback.topic=aggregation-feedback
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+aggregation.duration.days=30
+aggregation.advance.days=1
+
+trigger.interval=15
+grace.period.ms=270
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-beam-samza/.gitignore b/theodolite-benchmarks/uc4-beam-samza/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..7bf05dd280fcc888467656ce1fbdeb65322c7ba8
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/.gitignore
@@ -0,0 +1 @@
+state
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-beam-samza/Dockerfile b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..ffb82ee09e99384d4914a0f86b6d9214fc161381
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/Dockerfile
@@ -0,0 +1,8 @@
+FROM openjdk:11-slim
+
+ENV MAX_SOURCE_PARALLELISM=1024
+
+ADD build/distributions/uc4-beam-samza.tar /
+ADD samza-standalone.properties /
+
+CMD /uc4-beam-samza/bin/uc4-beam-samza --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory --configFilePath=samza-standalone.properties --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=$MAX_SOURCE_PARALLELISM --enableMetrics=false --configOverride="{\"job.coordinator.zk.connect\":\"$SAMZA_JOB_COORDINATOR_ZK_CONNECT\"}"
diff --git a/theodolite-benchmarks/uc4-beam-samza/build.gradle b/theodolite-benchmarks/uc4-beam-samza/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..90dcc76b0cae5aeca61f76d0cb73347b2407408b
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/build.gradle
@@ -0,0 +1,10 @@
+plugins {
+  id 'theodolite.beam.samza'
+}
+
+dependencies {
+  implementation project(':uc4-beam')
+}
+
+
+mainClassName = "application.Uc4BeamSamza"
diff --git a/theodolite-benchmarks/uc4-beam-samza/samza-standalone.properties b/theodolite-benchmarks/uc4-beam-samza/samza-standalone.properties
new file mode 100644
index 0000000000000000000000000000000000000000..812a9784c25ea84922b8a0f96f17b0377cedf925
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/samza-standalone.properties
@@ -0,0 +1,23 @@
+# Set EnvironmentRewriter
+job.config.rewriters=env-config
+job.config.rewriter.env-config.class=org.apache.samza.config.EnvironmentConfigRewriter
+
+# Configure ZooKeeper for coordination
+job.coordinator.factory=org.apache.samza.zk.ZkJobCoordinatorFactory
+job.coordinator.zk.connect=localhost:2181
+
+# Use GroupByContainerIds
+task.name.grouper.factory=org.apache.samza.container.grouper.task.GroupByContainerIdsFactory
+
+# Configure Kafka as "system"
+job.default.system=kafka
+systems.kafka.samza.factory=org.apache.samza.system.kafka.KafkaSystemFactory
+systems.kafka.consumer.bootstrap.servers=localhost:9092
+systems.kafka.producer.bootstrap.servers=localhost:9092
+systems.kafka.default.stream.replication.factor=1
+
+# Configure serialization and stores
+serializers.registry.string.class=org.apache.samza.serializers.StringSerdeFactory
+stores.my-store.factory=org.apache.samza.storage.kv.RocksDbKeyValueStorageEngineFactory
+stores.my-store.key.serde=string
+stores.my-store.msg.serde=string
diff --git a/theodolite-benchmarks/uc4-beam-samza/src/main/java/application/Uc4BeamSamza.java b/theodolite-benchmarks/uc4-beam-samza/src/main/java/application/Uc4BeamSamza.java
new file mode 100644
index 0000000000000000000000000000000000000000..3894fa95f16253e0a165dde70bf25d4a4bee96cb
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/src/main/java/application/Uc4BeamSamza.java
@@ -0,0 +1,40 @@
+package application;
+
+import org.apache.beam.runners.samza.SamzaRunner;
+import org.apache.beam.sdk.Pipeline;
+import theodolite.commons.beam.AbstractBeamService;
+
+/**
+ * Implementation of the use case Hierarchical Aggregation using Apache Beam with the Samza
+ * Runner. To run locally in standalone start Kafka, Zookeeper, the schema-registry and the
+ * workload generator using the delayed_startup.sh script. Add
+ * --configFactory=org.apache.samza.config.factories.PropertiesConfigFactory
+ * --configFilePath=${workspace_loc:uc4-application-samza}/config/standalone_local.properties
+ * --samzaExecutionEnvironment=STANDALONE --maxSourceParallelism=1024 as program arguments. To
+ * persist logs add ${workspace_loc:/uc4-application-samza/eclipseConsoleLogs.log} as Output File
+ * under Standard Input Output in Common in the Run Configuration. Start via Eclipse Run.
+ */
+public final class Uc4BeamSamza extends AbstractBeamService {
+
+
+  /**
+   * Private constructor setting specific options for this use case.
+   */
+  private Uc4BeamSamza(final String[] args) { //NOPMD
+    super(args);
+    this.options.setRunner(SamzaRunner.class);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public static void main(final String[] args) {
+
+    final Uc4BeamSamza uc4BeamSamza = new Uc4BeamSamza(args);
+
+    final Pipeline pipeline = new Uc4BeamPipeline(uc4BeamSamza.options, uc4BeamSamza.getConfig());
+
+    pipeline.run().waitUntilFinish();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam-samza/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-beam-samza/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..bc679580dadf969e181b6787e8287066426be7e2
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam-samza/src/main/resources/META-INF/application.properties
@@ -0,0 +1,25 @@
+application.name=theodolite-uc4-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+kafka.output.topic=output
+kafka.configuration.topic=configuration
+kafka.feedback.topic=aggregation-feedback
+kafka.window.duration.minutes=1
+
+schema.registry.url=http://localhost:8081
+
+aggregation.duration.days=30
+aggregation.advance.days=1
+
+trigger.interval=15
+grace.period.ms=270
+
+num.threads=1
+commit.interval.ms=1000
+cache.max.bytes.buffering=-1
+
+specific.avro.reader=True
+enable.auto.commit.config=True
+auto.offset.reset.config=earliest
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-beam/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-beam/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..32e18c393f53a0b0b3207bb896ec0e4211b27bf0
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,284 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=false
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=false
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=false
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=false
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=true
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=false
+sp_cleanup.useless_return=false
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc4-beam/build.gradle b/theodolite-benchmarks/uc4-beam/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..502e94fa737fb2ae1bab861407b27575cd8766ca
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.beam'
+}
+
+
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedActivePowerRecordEventTimePolicy.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedActivePowerRecordEventTimePolicy.java
new file mode 100644
index 0000000000000000000000000000000000000000..dad9eca7d8d50d1b85932ddaa7ffc99418a4b759
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedActivePowerRecordEventTimePolicy.java
@@ -0,0 +1,34 @@
+package application;
+
+import java.util.Optional;
+import org.apache.beam.sdk.io.kafka.KafkaRecord;
+import org.apache.beam.sdk.io.kafka.TimestampPolicy;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.joda.time.Instant;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * TimestampPolicy that uses event time based on the timestamp of the record value.
+ */
+public class AggregatedActivePowerRecordEventTimePolicy
+    extends TimestampPolicy<String, AggregatedActivePowerRecord> {
+  protected Instant currentWatermark;
+
+  public AggregatedActivePowerRecordEventTimePolicy(final Optional<Instant> previousWatermark) {
+    super();
+    this.currentWatermark = previousWatermark.orElse(BoundedWindow.TIMESTAMP_MIN_VALUE);
+  }
+
+  @Override
+  public Instant getTimestampForRecord(final PartitionContext ctx,
+      final KafkaRecord<String, AggregatedActivePowerRecord> record) {
+    this.currentWatermark = new Instant(record.getKV().getValue().getTimestamp());
+    return this.currentWatermark;
+  }
+
+  @Override
+  public Instant getWatermark(final PartitionContext ctx) {
+    return this.currentWatermark;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedToActive.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedToActive.java
new file mode 100644
index 0000000000000000000000000000000000000000..bddd43e1e09c54bcfc85e5cbb65d1a6487f53438
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/AggregatedToActive.java
@@ -0,0 +1,22 @@
+package application;
+
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Converts AggregatedActivePowerRecord to ActivePowerRecord.
+ */
+public class AggregatedToActive
+    extends SimpleFunction<KV<String, AggregatedActivePowerRecord>, KV<String, ActivePowerRecord>> {
+
+  private static final long serialVersionUID = -8275252527964065889L;
+
+  @Override
+  public KV<String, ActivePowerRecord> apply(
+      final KV<String, AggregatedActivePowerRecord> kv) {
+    return KV.of(kv.getKey(), new ActivePowerRecord(kv.getValue().getIdentifier(),
+        kv.getValue().getTimestamp(), kv.getValue().getSumInW()));
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java
new file mode 100644
index 0000000000000000000000000000000000000000..7b66082c91b87c246d8c834249d2bc82545766f5
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/DuplicateAsFlatMap.java
@@ -0,0 +1,69 @@
+package application;
+
+import com.google.common.base.MoreObjects;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.StateSpecs;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollectionView;
+import titan.ccp.model.records.ActivePowerRecord;
+
+
+/**
+ * Duplicates each input record as a flat map, emitting one KV pair per (child, parent) match.
+ */
+public class DuplicateAsFlatMap extends DoFn
+    <KV<String, ActivePowerRecord>, KV<SensorParentKey, ActivePowerRecord>> {
+  private static final long serialVersionUID = -5132355515723961647L;
+  @StateId("parents")
+  private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value();//NOPMD
+  private final PCollectionView<Map<String, Set<String>>> childParentPairMap;
+
+  public DuplicateAsFlatMap(final PCollectionView<Map<String, Set<String>>> childParentPairMap) {
+    super();
+    this.childParentPairMap = childParentPairMap;
+  }
+
+
+  /**
+   *  Generate a KV-pair for every child-parent match.
+   */
+  @ProcessElement
+  public void processElement(@Element final KV<String, ActivePowerRecord> kv,
+                             final OutputReceiver<KV<SensorParentKey, ActivePowerRecord>> out,
+                             @StateId("parents") final ValueState<Set<String>> state,
+                             final ProcessContext c) {
+
+    final ActivePowerRecord record = kv.getValue() == null ? null : kv.getValue();
+    final Set<String> newParents =
+        c.sideInput(childParentPairMap).get(kv.getKey()) == null
+            ? Collections.emptySet()
+            : c.sideInput(childParentPairMap).get(kv.getKey());
+    final Set<String> oldParents =
+        MoreObjects.firstNonNull(state.read(), Collections.emptySet());
+    // Forward new Pairs if they exist
+    if (!newParents.isEmpty()) {
+      for (final String parent : newParents) {
+
+        // Forward flat mapped record
+        final SensorParentKey key = new SensorParentKey(kv.getKey(), parent);
+        out.output(KV.of(key, record));
+      }
+    }
+    if (!newParents.equals(oldParents)) {
+      for (final String oldParent : oldParents) {
+        if (!newParents.contains(oldParent)) {
+          // Forward Delete
+          final SensorParentKey key = new SensorParentKey(kv.getKey(), oldParent);
+          out.output(KV.of(key, null));
+        }
+      }
+      state.write(newParents);
+    }
+  }
+}
+
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterEvents.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterEvents.java
new file mode 100644
index 0000000000000000000000000000000000000000..3588443393fdef2e0fd1bf5f1e7c497e5030cf77
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterEvents.java
@@ -0,0 +1,19 @@
+package application;
+
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.configuration.events.Event;
+
+/**
+ * Filters for {@code Event.SENSOR_REGISTRY_CHANGED} and {@code Event.SENSOR_REGISTRY_STATUS}
+ * events.
+ */
+public class FilterEvents implements SerializableFunction<KV<Event, String>, Boolean> {
+  private static final long serialVersionUID = -2233447357614891559L;
+
+  @Override
+  public Boolean apply(final KV<Event, String> kv) {
+    return kv.getKey() == Event.SENSOR_REGISTRY_CHANGED
+        || kv.getKey() == Event.SENSOR_REGISTRY_STATUS;
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterNullValues.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterNullValues.java
new file mode 100644
index 0000000000000000000000000000000000000000..143294f1ff2bfeea77c40ce38cd10ce3eb44be49
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/FilterNullValues.java
@@ -0,0 +1,18 @@
+package application;
+
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Filters {@code null} Values.
+ */
+public class FilterNullValues implements
+    SerializableFunction<KV<SensorParentKey, ActivePowerRecord>, Boolean> {
+  private static final long serialVersionUID = -6197352369880867482L;
+
+  @Override
+  public Boolean apply(final KV<SensorParentKey, ActivePowerRecord> kv) {
+    return kv.getValue() != null;
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/GenerateParentsFn.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/GenerateParentsFn.java
new file mode 100644
index 0000000000000000000000000000000000000000..68cf551af9c681c586ebc6026c043ae8c9befbc5
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/GenerateParentsFn.java
@@ -0,0 +1,60 @@
+package application;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.values.KV;
+// import theodolite.uc2.streamprocessing.KeyValue;
+// import theodolite.uc2.streamprocessing.KeyValueIterator;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.sensorregistry.AggregatedSensor;
+import titan.ccp.model.sensorregistry.Sensor;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+/**
+ * DoFn class to generate a child-parent pair for every sensor in the hierarchy.
+ */
+public class GenerateParentsFn extends DoFn<KV<Event, String>, KV<String, Set<String>>> {
+
+  private static final long serialVersionUID = 958270648688932091L;
+
+  /**
+   * Transforms a parent [children] map of sensors to a child [parents] map.
+   *
+   * @param kv input map.
+   * @param out outputstream.
+   */
+  @ProcessElement
+  public void processElement(@Element final KV<Event, String> kv,
+      final OutputReceiver<KV<String, Set<String>>> out) {
+    final Map<String, Set<String>> childParentsPairs =
+        this.constructChildParentsPairs(SensorRegistry.fromJson(kv.getValue()));
+    final Iterator<Map.Entry<String, Set<String>>> it = childParentsPairs.entrySet().iterator();
+    while (it.hasNext()) {
+      final Map.Entry<String, Set<String>> pair = it.next();
+      out.output(KV.of(pair.getKey(), pair.getValue()));
+    }
+
+  }
+
+  private Map<String, Set<String>> constructChildParentsPairs(final SensorRegistry registry) {
+    return this.streamAllChildren(registry.getTopLevelSensor())
+        .collect(Collectors.<Sensor, String, Set<String>>toMap(
+            child -> child.getIdentifier(),
+            child -> child.getParent()
+                .map(p -> Stream.of(p.getIdentifier()).collect(Collectors.toSet()))
+                .orElse(Collections.<String>emptySet())));
+  }
+
+  private Stream<Sensor> streamAllChildren(final AggregatedSensor sensor) {
+    return sensor.getChildren().stream()
+        .flatMap(s -> Stream.concat(
+            Stream.of(s),
+            s instanceof AggregatedSensor ? this.streamAllChildren((AggregatedSensor) s)
+                : Stream.empty()));
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/RecordAggregation.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/RecordAggregation.java
new file mode 100644
index 0000000000000000000000000000000000000000..16fd411b44cc1f955b255be870215ac120bce193
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/RecordAggregation.java
@@ -0,0 +1,63 @@
+package application;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.DefaultCoder;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+
+
+/**
+ * CombineFn to aggregate ActivePowerRecords into AggregatedActivePowerRecords.
+ */
+public class RecordAggregation
+    extends CombineFn<ActivePowerRecord, RecordAggregation.Accum, AggregatedActivePowerRecord> {
+
+  private static final long serialVersionUID = 4362213539553233529L;
+
+  /**
+   * Wrapper for an accumulation of records.
+   */
+  @DefaultCoder(AvroCoder.class)
+  public static class Accum implements Serializable {
+    private static final long serialVersionUID = 3701311203919534376L;
+    private long count;
+    private Double sum = 0.0;
+    private long timestamp;
+  }
+
+  @Override
+  public Accum createAccumulator() {
+    return new Accum();
+  }
+
+  @Override
+  public Accum addInput(final Accum mutableAccumulator, final ActivePowerRecord input) {
+    mutableAccumulator.count += 1;
+    mutableAccumulator.sum += input.getValueInW();
+    mutableAccumulator.timestamp = input.getTimestamp();
+    return mutableAccumulator;
+  }
+
+  @Override
+  public Accum mergeAccumulators(final Iterable<Accum> accumulators) {
+    final Accum merged = this.createAccumulator();
+    for (final Accum accumulator : accumulators) {
+      merged.count += accumulator.count;
+      merged.sum += accumulator.sum;
+      merged.timestamp = accumulator.timestamp;
+    }
+
+    return merged;
+  }
+
+  @Override
+  public AggregatedActivePowerRecord extractOutput(final Accum accumulator) {
+    final double average = accumulator.count == 0 ? 0.0 : accumulator.sum / accumulator.count;
+    return new AggregatedActivePowerRecord("", accumulator.timestamp, accumulator.count,
+        accumulator.sum, average);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/SensorParentKey.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/SensorParentKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..546fc04c2de089a28d8f0fba86a7fbcd5c1cc0a8
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/SensorParentKey.java
@@ -0,0 +1,30 @@
+package application;
+
+/**
+ * A key consisting of the identifier of a sensor and an identifier of parent sensor.
+ */
+public class SensorParentKey {
+
+  private final String sensorIdentifier;
+
+  private final String parentIdentifier;
+
+  public SensorParentKey(final String sensorIdentifier, final String parentIdentifier) {
+    this.sensorIdentifier = sensorIdentifier;
+    this.parentIdentifier = parentIdentifier;
+  }
+
+  public String getSensor() {
+    return this.sensorIdentifier;
+  }
+
+  public String getParent() {
+    return this.parentIdentifier;
+  }
+
+  @Override
+  public String toString() {
+    return "{" + this.sensorIdentifier + ", " + this.parentIdentifier + "}";
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/SetIdForAggregated.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/SetIdForAggregated.java
new file mode 100644
index 0000000000000000000000000000000000000000..0279a26ed925408b5383be50a202f5c88cea53aa
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/SetIdForAggregated.java
@@ -0,0 +1,22 @@
+package application;
+
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Sets the identifier for new {@link AggregatedActivePowerRecord}.
+ */
+public class SetIdForAggregated extends
+    SimpleFunction<KV<String, AggregatedActivePowerRecord>, KV<String, AggregatedActivePowerRecord>> { // NOCS
+  private static final long serialVersionUID = 2148522605294086982L;
+
+  @Override
+  public KV<String, AggregatedActivePowerRecord> apply(
+      final KV<String, AggregatedActivePowerRecord> kv) {
+    final AggregatedActivePowerRecord record = new AggregatedActivePowerRecord(
+        kv.getKey(), kv.getValue().getTimestamp(), kv.getValue().getCount(),
+        kv.getValue().getSumInW(), kv.getValue().getAverageInW());
+    return KV.of(kv.getKey(), record);
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/SetKeyToGroup.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/SetKeyToGroup.java
new file mode 100644
index 0000000000000000000000000000000000000000..7d8dc70583fb45e02a5a8091b92d724bb22b4a78
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/SetKeyToGroup.java
@@ -0,0 +1,20 @@
+package application;
+
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.values.KV;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Set the Key for a group of {@code ActivePowerRecords} to their Parent.
+ */
+public class SetKeyToGroup
+    extends SimpleFunction<KV<SensorParentKey, ActivePowerRecord>, KV<String, ActivePowerRecord>> {
+
+  private static final long serialVersionUID = 790215050768527L;
+
+  @Override
+  public KV<String, ActivePowerRecord> apply(
+      final KV<SensorParentKey, ActivePowerRecord> kv) {
+    return KV.of(kv.getKey().getParent(), kv.getValue());
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java
new file mode 100644
index 0000000000000000000000000000000000000000..7179fe5da937280d5baf72cd73cc392ef15a60e0
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/Uc4BeamPipeline.java
@@ -0,0 +1,248 @@
+package application; // NOPMD
+
+import com.google.common.math.StatsAccumulator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.coders.SetCoder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Filter;
+import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.Latest;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.transforms.windowing.AfterPane;
+import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
+import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
+import org.apache.beam.sdk.transforms.windowing.FixedWindows;
+import org.apache.beam.sdk.transforms.windowing.Repeatedly;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionList;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.commons.configuration2.Configuration;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.joda.time.Duration;
+import serialization.AggregatedActivePowerRecordCoder;
+import serialization.AggregatedActivePowerRecordDeserializer;
+import serialization.AggregatedActivePowerRecordSerializer;
+import serialization.EventCoder;
+import serialization.EventDeserializer;
+import serialization.SensorParentKeyCoder;
+import theodolite.commons.beam.AbstractPipeline;
+import theodolite.commons.beam.ConfigurationKeys;
+import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;
+import theodolite.commons.beam.kafka.KafkaGenericReader;
+import theodolite.commons.beam.kafka.KafkaWriterTransformation;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Implementation of the use case Hierarchical Aggregation using Apache Beam.
+ */
+public final class Uc4BeamPipeline extends AbstractPipeline {
+
+  protected Uc4BeamPipeline(final PipelineOptions options, final Configuration config) { // NOPMD
+    super(options, config);
+
+    // Additional needed variables
+    final String feedbackTopic = config.getString(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC);
+    final String outputTopic = config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC);
+    final String configurationTopic = config.getString(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC);
+
+    final Duration duration =
+        Duration.standardMinutes(config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES));
+    final Duration triggerDelay =
+        Duration.standardSeconds(config.getInt(ConfigurationKeys.TRIGGER_INTERVAL));
+    final Duration gracePeriod =
+        Duration.millis(config.getInt(ConfigurationKeys.GRACE_PERIOD_MS));
+
+    // Build kafka configuration
+    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
+    final Map<String, Object> configurationConfig = this.configurationConfig(config);
+
+    // Set Coders for Classes that will be distributed
+    final CoderRegistry cr = this.getCoderRegistry();
+    registerCoders(cr);
+
+    // Read from Kafka
+    // ActivePowerRecords
+    final KafkaActivePowerTimestampReader kafkaActivePowerRecordReader =
+        new KafkaActivePowerTimestampReader(this.bootstrapServer, this.inputTopic, consumerConfig);
+
+    // Configuration Events
+    final KafkaGenericReader<Event, String> kafkaConfigurationReader =
+        new KafkaGenericReader<>(
+            this.bootstrapServer, configurationTopic, configurationConfig,
+            EventDeserializer.class, StringDeserializer.class);
+
+    // Transform into AggregatedActivePowerRecords into ActivePowerRecords
+    final AggregatedToActive aggregatedToActive = new AggregatedToActive();
+
+    // Write to Kafka
+    final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaOutput =
+        new KafkaWriterTransformation<>(
+            this.bootstrapServer, outputTopic, AggregatedActivePowerRecordSerializer.class);
+
+    final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaFeedback =
+        new KafkaWriterTransformation<>(
+            this.bootstrapServer, feedbackTopic, AggregatedActivePowerRecordSerializer.class);
+
+    // Apply pipeline transformations
+    final PCollection<KV<String, ActivePowerRecord>> values = this
+        .apply("Read from Kafka", kafkaActivePowerRecordReader)
+        .apply("Read Windows", Window.into(FixedWindows.of(duration)))
+        .apply("Set trigger for input", Window
+            .<KV<String, ActivePowerRecord>>configure()
+            .triggering(Repeatedly.forever(
+                AfterProcessingTime.pastFirstElementInPane()
+                    .plusDelayOf(triggerDelay)))
+            .withAllowedLateness(gracePeriod)
+            .discardingFiredPanes());
+
+    // Read the results of earlier aggregations.
+    final PCollection<KV<String, ActivePowerRecord>> aggregationsInput = this
+        .apply("Read aggregation results", KafkaIO.<String, AggregatedActivePowerRecord>read()
+            .withBootstrapServers(this.bootstrapServer)
+            .withTopic(feedbackTopic)
+            .withKeyDeserializer(StringDeserializer.class)
+            .withValueDeserializer(AggregatedActivePowerRecordDeserializer.class)
+            .withTimestampPolicyFactory(
+                (tp, previousWaterMark) -> new AggregatedActivePowerRecordEventTimePolicy(
+                    previousWaterMark))
+            .withoutMetadata())
+        .apply("Apply Windows", Window.into(FixedWindows.of(duration)))
+        // Convert into the correct data format
+        .apply("Convert AggregatedActivePowerRecord to ActivePowerRecord",
+            MapElements.via(aggregatedToActive))
+        .apply("Set trigger for feedback", Window
+            .<KV<String, ActivePowerRecord>>configure()
+            .triggering(Repeatedly.forever(
+                AfterProcessingTime.pastFirstElementInPane()
+                    .plusDelayOf(triggerDelay)))
+            .withAllowedLateness(gracePeriod)
+            .discardingFiredPanes());
+
+    // Prepare flatten
+    final PCollectionList<KV<String, ActivePowerRecord>> collections =
+        PCollectionList.of(values).and(aggregationsInput);
+
+    // Create a single PCollection out of the input and already computed results
+    final PCollection<KV<String, ActivePowerRecord>> inputCollection =
+        collections.apply("Flatten sensor data and aggregation results",
+            Flatten.pCollections());
+
+    // Build the configuration stream from a changelog.
+    final PCollection<KV<String, Set<String>>> configurationStream = this
+        .apply("Read sensor groups", kafkaConfigurationReader)
+        // Only forward relevant changes in the hierarchy
+        .apply("Filter changed and status events",
+            Filter.by(new FilterEvents()))
+        // Build the changelog
+        .apply("Generate Parents for every Sensor", ParDo.of(new GenerateParentsFn()))
+        .apply("Update child and parent pairs", ParDo.of(new UpdateChildParentPairs()))
+        .apply("Set trigger for configuration", Window
+            .<KV<String, Set<String>>>configure()
+            .triggering(AfterWatermark.pastEndOfWindow()
+                .withEarlyFirings(
+                    AfterPane.elementCountAtLeast(1)))
+            .withAllowedLateness(Duration.ZERO)
+            .accumulatingFiredPanes());
+
+    final PCollectionView<Map<String, Set<String>>> childParentPairMap =
+        configurationStream.apply(Latest.perKey())
+            // Reset trigger to avoid synchronized processing time
+            .apply("Reset trigger for configurations", Window
+                .<KV<String, Set<String>>>configure()
+                .triggering(AfterWatermark.pastEndOfWindow()
+                    .withEarlyFirings(
+                        AfterPane.elementCountAtLeast(1)))
+                .withAllowedLateness(Duration.ZERO)
+                .accumulatingFiredPanes())
+            .apply(View.asMap());
+
+    final FilterNullValues filterNullValues = new FilterNullValues();
+
+    // Build pairs of every sensor reading and parent
+    final PCollection<KV<SensorParentKey, ActivePowerRecord>> flatMappedValues =
+        inputCollection.apply(
+            "Duplicate as flatMap",
+            ParDo.of(new DuplicateAsFlatMap(childParentPairMap))
+                .withSideInputs(childParentPairMap))
+            .apply("Filter only latest changes", Latest.perKey())
+            .apply("Filter out null values",
+                Filter.by(filterNullValues));
+
+    final SetIdForAggregated setIdForAggregated = new SetIdForAggregated();
+    final SetKeyToGroup setKeyToGroup = new SetKeyToGroup();
+
+    // Aggregate for every sensor group of the current level
+    final PCollection<KV<String, AggregatedActivePowerRecord>> aggregations = flatMappedValues
+        .apply("Set key to group", MapElements.via(setKeyToGroup))
+        // Reset trigger to avoid synchronized processing time
+        .apply("Reset trigger for aggregations", Window
+            .<KV<String, ActivePowerRecord>>configure()
+            .triggering(Repeatedly.forever(
+                AfterProcessingTime.pastFirstElementInPane()
+                    .plusDelayOf(triggerDelay)))
+            .withAllowedLateness(gracePeriod)
+            .discardingFiredPanes())
+        .apply(
+            "Aggregate per group",
+            Combine.perKey(new RecordAggregation()))
+        .apply("Set the Identifier in AggregatedActivePowerRecord",
+            MapElements.via(setIdForAggregated));
+
+    aggregations.apply("Write to aggregation results", kafkaOutput);
+
+    aggregations
+        .apply("Write to feedback topic", kafkaFeedback);
+
+  }
+
+
+  /**
+   * Builds a simple configuration for a Kafka consumer transformation.
+   *
+   * @return the build configuration.
+   */
+  public Map<String, Object> configurationConfig(final Configuration config) {
+    final Map<String, Object> consumerConfig = new HashMap<>();
+    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+        config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
+    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        config
+            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+
+    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, config
+        .getString(ConfigurationKeys.APPLICATION_NAME) + "-configuration");
+    return consumerConfig;
+  }
+
+
+  /**
+   * Registers all Coders for all needed Coders.
+   *
+   * @param cr CoderRegistry.
+   */
+  private static void registerCoders(final CoderRegistry cr) {
+    cr.registerCoderForClass(ActivePowerRecord.class,
+        AvroCoder.of(ActivePowerRecord.class));
+    cr.registerCoderForClass(AggregatedActivePowerRecord.class,
+        new AggregatedActivePowerRecordCoder());
+    cr.registerCoderForClass(Set.class, SetCoder.of(StringUtf8Coder.of()));
+    cr.registerCoderForClass(Event.class, new EventCoder());
+    cr.registerCoderForClass(SensorParentKey.class, new SensorParentKeyCoder());
+    cr.registerCoderForClass(StatsAccumulator.class, AvroCoder.of(StatsAccumulator.class));
+  }
+}
+
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java b/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java
new file mode 100644
index 0000000000000000000000000000000000000000..8692be5ae6637ebda86f10d66b43c6071264e099
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/application/UpdateChildParentPairs.java
@@ -0,0 +1,36 @@
+package application;
+
+import java.util.Set;
+import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.StateSpecs;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.values.KV;
+
+/**
+ * Forward changes or tombstone values for deleted records.
+ */
+public class UpdateChildParentPairs extends DoFn<KV<String, Set<String>>, KV<String, Set<String>>> {
+
+  private static final long serialVersionUID = 1L;
+
+  @StateId("parents")
+  private final StateSpec<ValueState<Set<String>>> parents = // NOPMD
+      StateSpecs.value();
+
+  /**
+   * Match the changes accordingly.
+   *
+   * @param kv the sensor parents set that contains the changes.
+   */
+  @ProcessElement
+  public void processElement(@Element final KV<String, Set<String>> kv,
+      final OutputReceiver<KV<String, Set<String>>> out,
+      @StateId("parents") final ValueState<Set<String>> state) {
+    if (kv.getValue() == null || !kv.getValue().equals(state.read())) {
+      out.output(kv);
+      state.write(kv.getValue());
+    }
+
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..d2b484f5ab30be63f311d6dbcf495baebbd5e2b4
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordCoder.java
@@ -0,0 +1,57 @@
+package serialization;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.List;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderException;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Wrapper Class that encapsulates an AggregatedActivePowerRecord AvroCoder in a
+ * org.apache.beam.sdk.coders.Coder.
+ */
+@SuppressWarnings("serial")
+public class AggregatedActivePowerRecordCoder extends Coder<AggregatedActivePowerRecord>
+    implements Serializable {
+
+  private static final boolean DETERMINISTIC = true;
+
+  private transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
+      AvroCoder.of(AggregatedActivePowerRecord.class);
+
+  @Override
+  public void encode(final AggregatedActivePowerRecord value, final OutputStream outStream)
+      throws CoderException, IOException {
+    if (this.avroEnCoder == null) {
+      this.avroEnCoder = AvroCoder.of(AggregatedActivePowerRecord.class);
+    }
+    this.avroEnCoder.encode(value, outStream);
+
+  }
+
+  @Override
+  public AggregatedActivePowerRecord decode(final InputStream inStream)
+      throws CoderException, IOException {
+    if (this.avroEnCoder == null) {
+      this.avroEnCoder = AvroCoder.of(AggregatedActivePowerRecord.class);
+    }
+    return this.avroEnCoder.decode(inStream);
+
+  }
+
+  @Override
+  public List<? extends Coder<?>> getCoderArguments() {
+    return java.util.Collections.emptyList(); // Coder contract: empty list, not null, when there are no component coders
+  }
+
+  @Override
+  public void verifyDeterministic() throws NonDeterministicException {
+    if (!DETERMINISTIC) {
+      throw new NonDeterministicException(this, "This class should be deterministic!");
+    }
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..6e2f2765ff65d3bca2a127be36db0854f15afebc
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordDeserializer.java
@@ -0,0 +1,34 @@
+package serialization;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Kafka {@link Deserializer} for {@link AggregatedActivePowerRecord}s, backed by an {@link AvroCoder}.
+ */
+public class AggregatedActivePowerRecordDeserializer
+    implements Deserializer<AggregatedActivePowerRecord> {
+
+  private static final Logger LOGGER =
+      LoggerFactory.getLogger(AggregatedActivePowerRecordDeserializer.class);
+
+  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
+      AvroCoder.of(AggregatedActivePowerRecord.class);
+
+  @Override
+  public AggregatedActivePowerRecord deserialize(final String topic, final byte[] data) {
+    AggregatedActivePowerRecord value = null;
+    try {
+      value = this.avroEnCoder.decode(new ByteArrayInputStream(data));
+    } catch (final IOException e) {
+      LOGGER.error("Could not deserialize AggregatedActivePowerRecord", e);
+    }
+    return value;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..77b79d5465f1d561870bf5b04f8fa20f87076adb
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/AggregatedActivePowerRecordSerializer.java
@@ -0,0 +1,45 @@
+package serialization;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.kafka.common.serialization.Serializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+
+/**
+ * Kafka {@link Serializer} for {@link AggregatedActivePowerRecord}s, backed by an {@link AvroCoder}.
+ */
+public class AggregatedActivePowerRecordSerializer
+    implements Serializer<AggregatedActivePowerRecord> {
+
+  private static final Logger LOGGER =
+      LoggerFactory.getLogger(AggregatedActivePowerRecordSerializer.class);
+
+  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
+      AvroCoder.of(AggregatedActivePowerRecord.class);
+
+  @Override
+  public byte[] serialize(final String topic, final AggregatedActivePowerRecord data) {
+    final ByteArrayOutputStream out = new ByteArrayOutputStream();
+    try {
+      this.avroEnCoder.encode(data, out);
+    } catch (final IOException e) {
+      LOGGER.error("Could not serialize AggregatedActivePowerRecord", e);
+    }
+    final byte[] result = out.toByteArray();
+    try {
+      out.close();
+    } catch (final IOException e) {
+      LOGGER.error(
+          "Could not close output stream after serialization of AggregatedActivePowerRecord", e);
+    }
+    return result;
+  }
+
+  @Override
+  public void close() {
+    Serializer.super.close();
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventCoder.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventCoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..710beb71dc8776e6309028327b05307aa590a7f6
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventCoder.java
@@ -0,0 +1,63 @@
+package serialization;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderException;
+import org.apache.kafka.common.serialization.Serde;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.configuration.events.EventSerde;
+
+/**
+ * Wrapper Class that encapsulates a Event Serde in a org.apache.beam.sdk.coders.Coder.
+ */
+public class EventCoder extends Coder<Event> implements Serializable {
+
+  private static final long serialVersionUID = 8403045343970659100L;
+  private static final int VALUE_SIZE = 4;
+  private static final boolean DETERMINISTIC = true;
+
+  private transient Serde<Event> innerSerde = EventSerde.serde();
+
+  @Override
+  public void encode(final Event value, final OutputStream outStream)
+      throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = EventSerde.serde();
+    }
+    final byte[] bytes = this.innerSerde.serializer().serialize("ser", value);
+    final byte[] sizeinBytes = ByteBuffer.allocate(VALUE_SIZE).putInt(bytes.length).array();
+    outStream.write(sizeinBytes);
+    outStream.write(bytes);
+  }
+
+  @Override
+  public Event decode(final InputStream inStream) throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = EventSerde.serde();
+    }
+    // readNBytes blocks until the requested byte count (or EOF) is reached,
+    // unlike read(byte[]), whose return value was previously ignored (partial-read bug).
+    final byte[] sizeinBytes = inStream.readNBytes(VALUE_SIZE);
+    final int size = ByteBuffer.wrap(sizeinBytes).getInt();
+    final byte[] bytes = inStream.readNBytes(size);
+    return this.innerSerde.deserializer().deserialize("deser", bytes);
+  }
+
+  @Override
+  public List<? extends Coder<?>> getCoderArguments() {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void verifyDeterministic() throws NonDeterministicException {
+    if (!DETERMINISTIC) {
+      throw new NonDeterministicException(this, "This class should be deterministic!");
+    }
+  }
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventDeserializer.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..34e31a3059d0749848a30979f32e6df6651c1b47
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/EventDeserializer.java
@@ -0,0 +1,36 @@
+package serialization;
+
+import java.util.Map;
+import org.apache.kafka.common.serialization.ByteBufferDeserializer;
+import org.apache.kafka.common.serialization.Deserializer;
+import titan.ccp.configuration.events.Event;
+
+/**
+ * Deserializer for Events (SensorRegistry changes).
+ */
+public class EventDeserializer implements Deserializer<Event> {
+
+  private final ByteBufferDeserializer byteBufferDeserializer = new ByteBufferDeserializer();
+
+  @Override
+  public void configure(final Map<String, ?> configs, final boolean isKey) {
+    this.byteBufferDeserializer.configure(configs, isKey);
+  }
+
+  @Override
+  public Event deserialize(final String topic, final byte[] data) {
+    final int ordinal = this.byteBufferDeserializer.deserialize(topic, data).getInt();
+    for (final Event event : Event.values()) {
+      if (ordinal == event.ordinal()) {
+        return event;
+      }
+    }
+    throw new IllegalArgumentException("Deserialized data is not a valid event.");
+  }
+
+  @Override
+  public void close() {
+    this.byteBufferDeserializer.close();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeyCoder.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeyCoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..3e85c3242fb854bef514787c92bb58ad76526cb4
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeyCoder.java
@@ -0,0 +1,67 @@
+package serialization;
+
+import application.SensorParentKey;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderException;
+import org.apache.kafka.common.serialization.Serde;
+
+/**
+ * Wrapper class that encapsulates a SensorParentKey Serde in an org.apache.beam.sdk.coders.Coder.
+ */
+public class SensorParentKeyCoder extends Coder<SensorParentKey> implements Serializable {
+
+  private static final long serialVersionUID = -3480141901035692398L;
+  private static final boolean DETERMINISTIC = true;
+  private static final int VALUE_SIZE = 4;
+
+  private transient Serde<SensorParentKey> innerSerde = SensorParentKeySerde.serde();
+
+  @Override
+  public void encode(final SensorParentKey value, final OutputStream outStream)
+      throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = SensorParentKeySerde.serde();
+
+    }
+    final byte[] bytes = this.innerSerde.serializer().serialize("ser", value);
+    final byte[] sizeinBytes = ByteBuffer.allocate(VALUE_SIZE).putInt(bytes.length).array();
+    outStream.write(sizeinBytes);
+    outStream.write(bytes);
+
+  }
+
+  @Override
+  public SensorParentKey decode(final InputStream inStream) throws CoderException, IOException {
+    if (this.innerSerde == null) {
+      this.innerSerde = SensorParentKeySerde.serde();
+
+    }
+    final byte[] sizeinBytes = new byte[VALUE_SIZE];
+    inStream.read(sizeinBytes);
+    final int size = ByteBuffer.wrap(sizeinBytes).getInt();
+    final byte[] bytes = new byte[size];
+    inStream.read(bytes);
+    return this.innerSerde.deserializer().deserialize("deser", bytes);
+
+  }
+
+  @Override
+  public List<? extends Coder<?>> getCoderArguments() {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void verifyDeterministic() throws NonDeterministicException {
+    if (!DETERMINISTIC) {
+      throw new NonDeterministicException(this, "This class should be deterministic!");
+    }
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeySerde.java b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeySerde.java
new file mode 100644
index 0000000000000000000000000000000000000000..468adb3947439c11c4fd9b289f41b68e606bdb1d
--- /dev/null
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/serialization/SensorParentKeySerde.java
@@ -0,0 +1,34 @@
+package serialization;
+
+import application.SensorParentKey;
+import org.apache.kafka.common.serialization.Serde;
+import titan.ccp.common.kafka.simpleserdes.BufferSerde;
+import titan.ccp.common.kafka.simpleserdes.ReadBuffer;
+import titan.ccp.common.kafka.simpleserdes.SimpleSerdes;
+import titan.ccp.common.kafka.simpleserdes.WriteBuffer;
+
+/**
+ * {@link Serde} factory for {@link SensorParentKey}.
+ */
+public final class SensorParentKeySerde implements BufferSerde<SensorParentKey> {
+
+  private SensorParentKeySerde() {}
+
+  @Override
+  public void serialize(final WriteBuffer buffer, final SensorParentKey key) {
+    buffer.putString(key.getSensor());
+    buffer.putString(key.getParent());
+  }
+
+  @Override
+  public SensorParentKey deserialize(final ReadBuffer buffer) {
+    final String sensor = buffer.getString();
+    final String parent = buffer.getString();
+    return new SensorParentKey(sensor, parent);
+  }
+
+  public static Serde<SensorParentKey> serde() {
+    return SimpleSerdes.create(new SensorParentKeySerde());
+  }
+
+}
diff --git a/theodolite/crd/crd-benchmark.yaml b/theodolite/crd/crd-benchmark.yaml
index 7ab2e5f3b890a883f68dbbd36805f3791158f256..cd9c9f1e07c38a8727bcd23939319c0955e07645 100644
--- a/theodolite/crd/crd-benchmark.yaml
+++ b/theodolite/crd/crd-benchmark.yaml
@@ -64,13 +64,87 @@ spec:
                               type: array
                               items:
                                 type: string
+                  beforeActions:
+                    type: array
+                    default: []
+                    description: Infrastructure before actions are executed before the infrastructure is set up.
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
+                  afterActions:
+                    type: array
+                    default: []
+                    description: Infrastructure after actions are executed after the teardown of the infrastructure.
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
               sut:
                 description: The appResourceSets specifies all Kubernetes resources required to start the sut. A resourceSet can be either a configMap resourceSet or a fileSystem resourceSet.
                 type: object
                 properties:
                   resources:
                     type: array
-                    default: [ ]
+                    default: []
                     items:
                       type: object
                       oneOf:
@@ -101,6 +175,79 @@ spec:
                               type: array
                               items:
                                 type: string
+                  beforeActions:
+                    type: array
+                    default: []
+                    description: SUT before actions are executed before the SUT is started.
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
+                  afterActions:
+                    type: array
+                    default: []
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
               loadGenerator:
                 description: The loadGenResourceSets specifies all Kubernetes resources required to start the load generator. A resourceSet can be either a configMap resourceSet or a fileSystem resourceSet.
                 type: object
@@ -138,6 +285,80 @@ spec:
                               type: array
                               items:
                                 type: string
+                  beforeActions:
+                    type: array
+                    default: [ ]
+                    description: Load generator before actions are executed before the load generator is started.
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
+                  afterActions:
+                    type: array
+                    default: []
+                    description: Load generator after actions are executed after the teardown of the load generator.
+                    items:
+                      type: object
+                      properties:
+                        selector:
+                          type: object
+                          description: The selector specifies which resource should be selected for the execution of the command.
+                          properties:
+                            pod:
+                              type: object
+                              description: Specifies the pod.
+                              properties:
+                                matchLabels:
+                                  type: object
+                                  description: The matchLabels of the desired pod.
+                                  additionalProperties: true
+                                  x-kubernetes-map-type: "granular"
+                                  default: { }
+                            container:
+                              description: Specifies the container.
+                              default: ""
+                              type: string
+                        exec:
+                          type: object
+                          description: Specifies command to be executed.
+                          properties:
+                            command:
+                              type: array
+                              description: The command to be executed as string array.
+                              items:
+                                type: string
+                            timeoutSeconds:
+                              description: Specifies the timeout (in seconds) for the specified command.
+                              type: integer
               resourceTypes:
                 description: A list of resource types that can be scaled for this `benchmark` resource. For each resource type the concrete values are defined in the `execution` object.
                 type: array
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt b/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt
new file mode 100644
index 0000000000000000000000000000000000000000..35efebdc0fb2a3748660cb76cdd5499b4ca5f622
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt
@@ -0,0 +1,48 @@
+package theodolite.benchmark
+
+import com.fasterxml.jackson.annotation.JsonInclude
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize
+import io.fabric8.kubernetes.client.NamespacedKubernetesClient
+import io.quarkus.runtime.annotations.RegisterForReflection
+import theodolite.util.ActionCommandFailedException
+import theodolite.util.Configuration
+
+@JsonDeserialize
+@RegisterForReflection
+@JsonInclude(JsonInclude.Include.NON_NULL)
+class Action {
+
+    lateinit var selector: ActionSelector
+    lateinit var exec: Command
+
+    fun exec(client: NamespacedKubernetesClient) {
+        val exitCode = ActionCommand(client = client)
+            .exec(
+                matchLabels = selector.pod.matchLabels,
+                container = selector.container,
+                timeout = exec.timeoutSeconds,
+                command = exec.command
+        )
+            if(exitCode != 0){
+            throw ActionCommandFailedException("Error while executing action, finished with exit code $exitCode")
+        }
+    }
+}
+
+@JsonDeserialize
+@RegisterForReflection
+class ActionSelector {
+    lateinit var pod: PodSelector
+    var container: String = ""
+}
+@JsonDeserialize
+@RegisterForReflection
+class PodSelector {
+    lateinit var matchLabels: MutableMap<String, String>
+}
+@JsonDeserialize
+@RegisterForReflection
+class Command {
+    lateinit var command: Array<String>
+    var timeoutSeconds: Long = Configuration.TIMEOUT_SECONDS
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt b/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt
new file mode 100644
index 0000000000000000000000000000000000000000..966fa56329c8d7d466dd14858bcbc06bb5b857c3
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt
@@ -0,0 +1,161 @@
+package theodolite.benchmark
+
+import io.fabric8.kubernetes.api.model.Status
+import io.fabric8.kubernetes.client.KubernetesClientException
+import io.fabric8.kubernetes.client.NamespacedKubernetesClient
+import io.fabric8.kubernetes.client.dsl.ExecListener
+import io.fabric8.kubernetes.client.dsl.ExecWatch
+import io.fabric8.kubernetes.client.utils.Serialization
+import mu.KotlinLogging
+import okhttp3.Response
+import theodolite.util.ActionCommandFailedException
+import theodolite.util.Configuration
+import java.io.ByteArrayOutputStream
+import java.time.Duration
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.TimeUnit
+
+
+private val logger = KotlinLogging.logger {}
+
+class ActionCommand(val client: NamespacedKubernetesClient) {
+    var out: ByteArrayOutputStream = ByteArrayOutputStream()
+    var error: ByteArrayOutputStream = ByteArrayOutputStream()
+    var errChannelStream: ByteArrayOutputStream = ByteArrayOutputStream()
+    private val execLatch = CountDownLatch(1)
+
+    /**
+     * Executes an action command.
+     *
+     * @param matchLabels matchLabels specifies on which pod the command should be executed. For this, the principle
+     * of `any of` is used and the command is called on one of the possible pods.
+     * @param container (Optional) The container to run the command. Is optional iff exactly one container exists.
+     * @param command The command to be executed.
+     * @return the exit code of this executed command
+     */
+    fun exec(
+        matchLabels: MutableMap<String, String>,
+        command: Array<String>,
+        timeout: Long = Configuration.TIMEOUT_SECONDS,
+        container: String = ""
+    ): Int {
+        try {
+            val execWatch: ExecWatch = if (container.isNotEmpty()) {
+                client.pods()
+                    .inNamespace(client.namespace)
+                    .withName(getPodName(matchLabels, 3))
+                    .inContainer(container)
+
+            } else {
+                client.pods()
+                    .inNamespace(client.namespace)
+                    .withName(getPodName(matchLabels, 3))
+            }
+                .writingOutput(out)
+                .writingError(error)
+                .writingErrorChannel(errChannelStream)
+                .usingListener(ActionCommandListener(execLatch))
+                .exec(*command)
+
+            val latchTerminationStatus = execLatch.await(timeout, TimeUnit.SECONDS)
+            if (!latchTerminationStatus) {
+                throw ActionCommandFailedException("Latch could not terminate within specified time")
+            }
+            execWatch.close()
+        } catch (e: Exception) {
+            when (e) {
+                is InterruptedException -> {
+                    Thread.currentThread().interrupt()
+                    throw ActionCommandFailedException("Interrupted while waiting for the exec", e)
+                }
+                is KubernetesClientException -> {
+                    throw ActionCommandFailedException("Error while executing command", e)
+                }
+                else -> {
+                    throw e
+                }
+            }
+        }
+        logger.debug { "Execution Output Stream is \n $out" }
+        logger.debug { "Execution Error Stream is \n $error" }
+        logger.debug { "Execution ErrorChannel is: \n $errChannelStream" }
+        return getExitCode(errChannelStream)
+    }
+
+    private fun getExitCode(errChannelStream: ByteArrayOutputStream): Int {
+        val status: Status?
+        try {
+            status = Serialization.unmarshal(errChannelStream.toString(), Status::class.java)
+        } catch (e: Exception) {
+            throw ActionCommandFailedException("Could not determine the exit code, no information given")
+        }
+
+        if (status == null) {
+            throw ActionCommandFailedException("Could not determine the exit code, no information given")
+        }
+
+        return if (status.status.equals("Success")) {
+            0
+        } else status.details.causes.stream()
+            .filter { it.reason.equals("ExitCode") }
+            .map { it.message }
+            .findFirst()
+            .orElseThrow {
+                ActionCommandFailedException("Status is not SUCCESS but contains no exit code - Status: $status")
+            }.toInt()
+    }
+
+    /**
+     * Find pod with matching labels. The matching pod must have the status `Running`.
+     *
+     * @param matchLabels the match labels
+     * @param tries specifies the number of times to look for a matching pod. When pods are newly created,
+     * it can take a while until the status is ready and the pod can be selected.
+     * @return the name of the pod or throws [ActionCommandFailedException]
+     */
+    fun getPodName(matchLabels: MutableMap<String, String>, tries: Int): String {
+        for (i in 1..tries) {
+
+            try {
+                return getPodName(matchLabels)
+            } catch (e: Exception) {
+                logger.warn { "Could not found any pod with specified matchlabels or pod is not ready." }
+            }
+            Thread.sleep(Duration.ofSeconds(5).toMillis())
+        }
+        throw ActionCommandFailedException("Couldn't find any pod that matches the specified labels.")
+    }
+
+    private fun getPodName(matchLabels: MutableMap<String, String>): String {
+        return try {
+            val podNames = this.client
+                .pods()
+                .withLabels(matchLabels)
+                .list()
+                .items
+                .map { it.metadata.name }
+
+            podNames.first {
+                this.client.pods().withName(it).isReady
+            }
+
+        } catch (e: NoSuchElementException) {
+            throw ActionCommandFailedException("Couldn't find any pod that matches the specified labels.", e)
+        }
+    }
+
+    private class ActionCommandListener(val execLatch: CountDownLatch) : ExecListener {
+        override fun onOpen(response: Response) {
+        }
+
+        override fun onFailure(throwable: Throwable, response: Response) {
+            execLatch.countDown()
+            throw ActionCommandFailedException("Some error encountered while executing action, caused ${throwable.message})")
+        }
+
+        override fun onClose(code: Int, reason: String) {
+            execLatch.countDown()
+        }
+    }
+
+}
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
index 0b81f8701f92a95662efef6e0d58839c9a2f6f3b..2514c32158f07f822b34697cb7c4810848bfd27b 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
@@ -47,7 +47,7 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
     var namespace = System.getenv("NAMESPACE") ?: DEFAULT_NAMESPACE
 
     @Transient
-    private val client: NamespacedKubernetesClient = DefaultKubernetesClient().inNamespace(namespace)
+    private var client: NamespacedKubernetesClient = DefaultKubernetesClient().inNamespace(namespace)
 
     /**
      * Loads [KubernetesResource]s.
@@ -59,6 +59,7 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
     }
 
     override fun setupInfrastructure() {
+        this.infrastructure.beforeActions.forEach { it.exec(client = client) }
         val kubernetesManager = K8sManager(this.client)
         loadKubernetesResources(this.infrastructure.resources)
             .map{it.second}
@@ -70,7 +71,8 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
         loadKubernetesResources(this.infrastructure.resources)
             .map{it.second}
             .forEach { kubernetesManager.remove(it) }
-        }
+        this.infrastructure.afterActions.forEach { it.exec(client = client) }
+    }
 
     /**
      * Builds a deployment.
@@ -110,6 +112,10 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
             }
         }
         return KubernetesBenchmarkDeployment(
+            sutBeforeActions = sut.beforeActions,
+            sutAfterActions = sut.afterActions,
+            loadGenBeforeActions = loadGenerator.beforeActions,
+            loadGenAfterActions = loadGenerator.afterActions,
             appResources = appResources.map { it.second },
             loadGenResources = loadGenResources.map { it.second },
             loadGenerationDelay = loadGenerationDelay,
@@ -119,4 +125,13 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
             client = this.client
         )
     }
+
+    /**
+     * This function can be used to set the Kubernetes client manually. This is for example necessary for testing.
+     *
+     * @param client
+     */
+    fun setClient(client: NamespacedKubernetesClient) {
+        this.client = client
+    }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
index 423ac92c654ff55057796d9642c2cb408bc62fe5..9d32a4eeab656143e10b5057a173e04245d6f22b 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
@@ -23,6 +23,10 @@ private val logger = KotlinLogging.logger {}
  */
 @RegisterForReflection
 class KubernetesBenchmarkDeployment(
+    private val sutBeforeActions: List<Action>,
+    private val sutAfterActions: List<Action>,
+    private val loadGenBeforeActions: List<Action>,
+    private val loadGenAfterActions: List<Action>,
     val appResources: List<KubernetesResource>,
     val loadGenResources: List<KubernetesResource>,
     private val loadGenerationDelay: Long,
@@ -45,10 +49,13 @@ class KubernetesBenchmarkDeployment(
         val kafkaTopics = this.topics.filter { !it.removeOnly }
             .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
         kafkaController.createTopics(kafkaTopics)
+        sutBeforeActions.forEach { it.exec(client = client) }
         appResources.forEach { kubernetesManager.deploy(it) }
         logger.info { "Wait ${this.loadGenerationDelay} seconds before starting the load generator." }
         Thread.sleep(Duration.ofSeconds(this.loadGenerationDelay).toMillis())
+        loadGenBeforeActions.forEach { it.exec(client = client) }
         loadGenResources.forEach { kubernetesManager.deploy(it) }
+
     }
 
     /**
@@ -59,7 +66,9 @@ class KubernetesBenchmarkDeployment(
      */
     override fun teardown() {
         loadGenResources.forEach { kubernetesManager.remove(it) }
+        loadGenAfterActions.forEach { it.exec(client = client) }
         appResources.forEach { kubernetesManager.remove(it) }
+        sutAfterActions.forEach { it.exec(client = client) }
         kafkaController.removeTopics(this.topics.map { topic -> topic.name })
         ResourceByLabelHandler(client).removePods(
             labelName = LAG_EXPORTER_POD_LABEL_NAME,
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/Resources.kt b/theodolite/src/main/kotlin/theodolite/benchmark/Resources.kt
index 0187735b8fd273419874942cb7ed68797732c84c..fccbd2c41a646a2ef85ef77c65763e7f793d1e91 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/Resources.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/Resources.kt
@@ -1,7 +1,6 @@
 package theodolite.benchmark
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize
-import com.fasterxml.jackson.databind.annotation.JsonSerialize
 import io.quarkus.runtime.annotations.RegisterForReflection
 
 @JsonDeserialize
@@ -9,5 +8,7 @@ import io.quarkus.runtime.annotations.RegisterForReflection
 class Resources {
 
     lateinit var resources: List<ResourceSets>
+    lateinit var beforeActions: List<Action>
+    lateinit var afterActions: List<Action>
 
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt b/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
index 281c68e318784ee8206473cd014f814b3f5152a9..9a1315e7a88f0cdcae06dbb7ead757e1c0ce9931 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
@@ -37,7 +37,6 @@ class AnalysisExecutor(
      *  @return true if the experiment succeeded.
      */
     fun analyze(load: LoadDimension, res: Resource, executionIntervals: List<Pair<Instant, Instant>>): Boolean {
-        var result: Boolean
         var repetitionCounter = 1
 
         try {
@@ -50,7 +49,7 @@ class AnalysisExecutor(
                     fetcher.fetchMetric(
                         start = interval.first,
                         end = interval.second,
-                        query = SloConfigHandler.getQueryString(sloType = slo.sloType)
+                        query = SloConfigHandler.getQueryString(slo = slo)
                     )
                 }
 
@@ -68,12 +67,11 @@ class AnalysisExecutor(
                 load = load
             )
 
-            result = sloChecker.evaluate(prometheusData)
+            return sloChecker.evaluate(prometheusData)
 
         } catch (e: Exception) {
-            throw EvaluationFailedException("Evaluation failed for resource '${res.get()}' and load '${load.get()} ", e)
+            throw EvaluationFailedException("Evaluation failed for resource '${res.get()}' and load '${load.get()}", e)
         }
-        return result
     }
 
     private val NONLATIN: Pattern = Pattern.compile("[^\\w-]")
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt b/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
index d646286b70bc5880df1f603afdc2bda22bcc3259..7fb5417e200f64b0db74a8bebe69a751c5d484b8 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
@@ -1,6 +1,5 @@
 package theodolite.evaluation
 
-import com.google.gson.Gson
 import khttp.post
 import mu.KotlinLogging
 import theodolite.util.PrometheusResponse
@@ -9,13 +8,11 @@ import java.net.ConnectException
 /**
  * [SloChecker] that uses an external source for the concrete evaluation.
  * @param externalSlopeURL The url under which the external evaluation can be reached.
- * @param threshold threshold that should not be exceeded to evaluate to true.
- * @param warmup time that is not taken into consideration for the evaluation.
+ * @param metadata metadata passed to the external SLO checker.
  */
 class ExternalSloChecker(
     private val externalSlopeURL: String,
-    private val threshold: Int,
-    private val warmup: Int
+    private val metadata: Map<String, Any>
 ) : SloChecker {
 
     private val RETRIES = 2
@@ -28,29 +25,25 @@ class ExternalSloChecker(
      * Will try to reach the external service until success or [RETRIES] times.
      * Each request will timeout after [TIMEOUT].
      *
-     * @param start point of the experiment.
-     * @param end point of the experiment.
      * @param fetchedData that should be evaluated
-     * @return true if the experiment was successful(the threshold was not exceeded.
+     * @return true if the experiment was successful (the threshold was not exceeded).
      * @throws ConnectException if the external service could not be reached.
      */
     override fun evaluate(fetchedData: List<PrometheusResponse>): Boolean {
         var counter = 0
-        val data = SloJson.Builder()
-            .results(fetchedData.map { it.data?.result })
-            .addMetadata("threshold", threshold)
-            .addMetadata( "warmup", warmup)
-            .build()
-            .toJson()
+        val data = SloJson(
+            results = fetchedData.map { it.data?.result ?: listOf() },
+            metadata = metadata
+        ).toJson()
 
         while (counter < RETRIES) {
             val result = post(externalSlopeURL, data = data, timeout = TIMEOUT)
             if (result.statusCode != 200) {
                 counter++
-                logger.error { "Could not reach external SLO checker" }
+                logger.error { "Could not reach external SLO checker." }
             } else {
                 val booleanResult = result.text.toBoolean()
-                logger.info { "SLO checker result is: $booleanResult" }
+                logger.info { "SLO checker result is: $booleanResult." }
                 return booleanResult
             }
         }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloChecker.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloChecker.kt
index af70fa5dca3f0556d38791ed96c2af30b9a44a68..82f903f5be868731d58ebefd6279d5d438bd5eab 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloChecker.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloChecker.kt
@@ -11,7 +11,7 @@ interface SloChecker {
      * Evaluates [fetchedData] and returns if the experiments were successful.
      *
      * @param fetchedData from Prometheus that will be evaluated.
-     * @return true if experiments were successful. Otherwise false.
+     * @return true if experiments were successful. Otherwise, false.
      */
     fun evaluate(fetchedData: List<PrometheusResponse>): Boolean
 }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloCheckerFactory.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloCheckerFactory.kt
index 64f9110cd931feef41dc65f88d6623e82f4e03a2..f57cebfcb13d0e86919ec15a0a479d1258e318a6 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloCheckerFactory.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloCheckerFactory.kt
@@ -43,15 +43,32 @@ class SloCheckerFactory {
         properties: MutableMap<String, String>,
         load: LoadDimension
     ): SloChecker {
-        return when (sloType.toLowerCase()) {
-            SloTypes.LAG_TREND.value, SloTypes.DROPPED_RECORDS.value -> ExternalSloChecker(
+        return when (SloTypes.from(sloType)) {
+            SloTypes.GENERIC -> ExternalSloChecker(
                 externalSlopeURL = properties["externalSloUrl"]
                     ?: throw IllegalArgumentException("externalSloUrl expected"),
-                threshold = properties["threshold"]?.toInt() ?: throw IllegalArgumentException("threshold expected"),
-                warmup = properties["warmup"]?.toInt() ?: throw IllegalArgumentException("warmup expected")
+                // TODO validate property contents
+                metadata = mapOf(
+                    "warmup" to (properties["warmup"]?.toInt() ?: throw IllegalArgumentException("warmup expected")),
+                    "queryAggregation" to (properties["queryAggregation"]
+                        ?: throw IllegalArgumentException("queryAggregation expected")),
+                    "repetitionAggregation" to (properties["repetitionAggregation"]
+                        ?: throw IllegalArgumentException("repetitionAggregation expected")),
+                    "operator" to (properties["operator"] ?: throw IllegalArgumentException("operator expected")),
+                    "threshold" to (properties["threshold"]?.toInt()
+                        ?: throw IllegalArgumentException("threshold expected"))
+                )
             )
-
-                SloTypes.LAG_TREND_RATIO.value, SloTypes.DROPPED_RECORDS_RATIO.value -> {
+            SloTypes.LAG_TREND, SloTypes.DROPPED_RECORDS -> ExternalSloChecker(
+                externalSlopeURL = properties["externalSloUrl"]
+                    ?: throw IllegalArgumentException("externalSloUrl expected"),
+                metadata = mapOf(
+                    "warmup" to (properties["warmup"]?.toInt() ?: throw IllegalArgumentException("warmup expected")),
+                    "threshold" to (properties["threshold"]?.toInt()
+                        ?: throw IllegalArgumentException("threshold expected"))
+                )
+            )
+            SloTypes.LAG_TREND_RATIO, SloTypes.DROPPED_RECORDS_RATIO -> {
                 val thresholdRatio =
                     properties["ratio"]?.toDouble()
                         ?: throw IllegalArgumentException("ratio for threshold expected")
@@ -64,11 +81,13 @@ class SloCheckerFactory {
                 ExternalSloChecker(
                     externalSlopeURL = properties["externalSloUrl"]
                         ?: throw IllegalArgumentException("externalSloUrl expected"),
-                    threshold = threshold,
-                    warmup = properties["warmup"]?.toInt() ?: throw IllegalArgumentException("warmup expected")
+                    metadata = mapOf(
+                        "warmup" to (properties["warmup"]?.toInt()
+                            ?: throw IllegalArgumentException("warmup expected")),
+                        "threshold" to threshold
+                    )
                 )
             }
-            else -> throw IllegalArgumentException("Slotype $sloType not found.")
         }
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
index 93929218c822030ff065dafb19cce1fbaa69a179..425a4f3b0634d53f8b1d5c4b8abdba9ca81c3f2b 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
@@ -1,5 +1,6 @@
 package theodolite.evaluation
 
+import theodolite.benchmark.BenchmarkExecution
 import theodolite.util.InvalidPatcherConfigurationException
 import javax.enterprise.context.ApplicationScoped
 
@@ -7,13 +8,14 @@ private const val CONSUMER_LAG_QUERY = "sum by(group)(kafka_consumergroup_group_
 private const val DROPPED_RECORDS_QUERY = "sum by(job) (kafka_streams_stream_task_metrics_dropped_records_total>=0)"
 
 @ApplicationScoped
-class SloConfigHandler() {
+class SloConfigHandler {
     companion object {
-        fun getQueryString(sloType: String): String {
-            return when (sloType.toLowerCase()) {
+        fun getQueryString(slo: BenchmarkExecution.Slo): String {
+            return when (slo.sloType.toLowerCase()) {
+                SloTypes.GENERIC.value -> slo.properties["promQLQuery"] ?: throw IllegalArgumentException("promQLQuery expected")
                 SloTypes.LAG_TREND.value, SloTypes.LAG_TREND_RATIO.value -> CONSUMER_LAG_QUERY
                 SloTypes.DROPPED_RECORDS.value, SloTypes.DROPPED_RECORDS_RATIO.value -> DROPPED_RECORDS_QUERY
-                else -> throw  InvalidPatcherConfigurationException("Could not find Prometheus query string for slo type $sloType")
+                else -> throw InvalidPatcherConfigurationException("Could not find Prometheus query string for slo type ${slo.sloType}")
             }
         }
     }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloJson.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloJson.kt
index fc9fe17b255dbb5ae68881538d8d2a50a191edb1..205389276f2c1adef6cba6c745baf99744c8d2dd 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloJson.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloJson.kt
@@ -3,61 +3,17 @@ package theodolite.evaluation
 import com.google.gson.Gson
 import theodolite.util.PromResult
 
-class SloJson private constructor(
-    val results: List<List<PromResult>?>? = null,
-    var metadata: MutableMap<String, Any>? = null
+class SloJson constructor(
+    val results: List<List<PromResult>>,
+    var metadata: Map<String, Any>
 ) {
 
-    data class Builder(
-        var results:List<List<PromResult>?>? = null,
-        var metadata: MutableMap<String, Any>? = null
-    ) {
-
-        /**
-         *  Set the results
-         *
-         * @param results list of prometheus results
-         */
-        fun results(results: List<List<PromResult>?>) = apply { this.results = results }
-
-        /**
-         * Add metadata as key value pairs
-         *
-         * @param key key of the metadata to be added
-         * @param value value of the metadata to be added
-         */
-        fun addMetadata(key: String, value: String) = apply {
-            if (this.metadata.isNullOrEmpty()) {
-                this.metadata = mutableMapOf(key to value)
-            } else {
-                this.metadata!![key] = value
-            }
-        }
-
-        /**
-         * Add metadata as key value pairs
-         *
-         * @param key key of the metadata to be added
-         * @param value value of the metadata to be added
-         */
-        fun addMetadata(key: String, value: Int) = apply {
-            if (this.metadata.isNullOrEmpty()) {
-                this.metadata = mutableMapOf(key to value)
-            } else {
-                this.metadata!![key] = value
-            }
-        }
-
-        fun build() = SloJson(
-            results = results,
-            metadata = metadata
+    fun toJson(): String {
+        return Gson().toJson(
+            mapOf(
+                "results" to this.results,
+                "metadata" to this.metadata
+            )
         )
     }
-
-   fun  toJson(): String {
-       return Gson().toJson(mapOf(
-           "results" to this.results,
-           "metadata" to this.metadata
-       ))
-    }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloTypes.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloTypes.kt
index ac9de35861b0bd9c012bfb0b8cfcb2e1aa5aed68..812b50de779d2f3abfd5788b8aee145edc959e6c 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloTypes.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloTypes.kt
@@ -1,10 +1,14 @@
 package theodolite.evaluation
 
 enum class SloTypes(val value: String) {
+    GENERIC("generic"),
     LAG_TREND("lag trend"),
     LAG_TREND_RATIO("lag trend ratio"),
     DROPPED_RECORDS("dropped records"),
-    DROPPED_RECORDS_RATIO("dropped records ratio")
-
+    DROPPED_RECORDS_RATIO("dropped records ratio");
 
+    companion object {
+        fun from(type: String): SloTypes =
+            values().find { it.value == type } ?: throw IllegalArgumentException("Requested SLO type '$type' does not exist")
+    }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt b/theodolite/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
index 315d1cf1afe7fd2ffbfc1c437d725d4dff29f637..8596576e0a7984c32b6dabf90c6bbf06961d2bb1 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
@@ -137,6 +137,12 @@ class TheodoliteExecutor(
                 config.compositeStrategy.benchmarkExecutor.results,
                 "${resultsFolder}exp${this.config.executionId}-result"
             )
+            // Create expXYZ_demand.csv file
+            ioHandler.writeToCSVFile(
+                "${resultsFolder}exp${this.config.executionId}_demand",
+                calculateDemandMetric(config.loads, config.compositeStrategy.benchmarkExecutor.results),
+                listOf("load", "resources")
+            )
         }
         kubernetesBenchmark.teardownInfrastructure()
     }
@@ -151,4 +157,8 @@ class TheodoliteExecutor(
         return executionID
     }
 
+    private fun calculateDemandMetric(loadDimensions: List<LoadDimension>, results: Results): List<List<String>> {
+        return loadDimensions.map { listOf(it.get().toString(), results.getMinRequiredInstances(it).get().toString()) }
+    }
+
 }
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
new file mode 100644
index 0000000000000000000000000000000000000000..959b04a8e5c94806aea1753af56b2518436aed12
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
@@ -0,0 +1,201 @@
+package theodolite.execution.operator
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+import io.fabric8.kubernetes.client.NamespacedKubernetesClient
+import io.fabric8.kubernetes.client.dsl.MixedOperation
+import io.fabric8.kubernetes.client.dsl.Resource
+import theodolite.benchmark.Action
+import theodolite.benchmark.ActionSelector
+import theodolite.benchmark.KubernetesBenchmark
+import theodolite.benchmark.ResourceSets
+import theodolite.model.crd.BenchmarkCRD
+import theodolite.model.crd.BenchmarkStates
+import theodolite.model.crd.KubernetesBenchmarkList
+
+class BenchmarkStateChecker(
+    private val benchmarkCRDClient: MixedOperation<BenchmarkCRD, KubernetesBenchmarkList, Resource<BenchmarkCRD>>,
+    private val benchmarkStateHandler: BenchmarkStateHandler,
+    private val client: NamespacedKubernetesClient
+
+) {
+
+    fun start(running: Boolean) {
+        Thread {
+            while (running) {
+                updateBenchmarkStatus()
+                Thread.sleep(100 * 1)
+            }
+        }.start()
+    }
+
+    /**
+     * Checks and updates the states of all deployed benchmarks.
+     *
+     */
+    fun updateBenchmarkStatus() {
+        this.benchmarkCRDClient
+            .list()
+            .items
+            .map { it.spec.name = it.metadata.name; it }
+            .map { Pair(it, checkState(it.spec)) }
+            .forEach { setState(it.first, it.second) }
+    }
+
+    private fun setState(resource: BenchmarkCRD, state: BenchmarkStates) {
+        benchmarkStateHandler.setResourceSetState(resource.spec.name, state)
+    }
+
+    /**
+     * Checks the state of the benchmark.
+     *
+     * @param benchmark The benchmark to check
+     * @return [BenchmarkStates.READY] iff all resources could be loaded and all actions could be executed, else [BenchmarkStates.PENDING]
+     */
+    private fun checkState(benchmark: KubernetesBenchmark): BenchmarkStates {
+        return if (checkActionCommands(benchmark) == BenchmarkStates.READY
+            && checkResources(benchmark) == BenchmarkStates.READY
+        ) {
+            BenchmarkStates.READY
+        } else {
+            BenchmarkStates.PENDING
+        }
+    }
+
+    /**
+     * Checks if all specified actions of the given benchmark could be executed or not
+     *
+     * @param benchmark The benchmark to check
+     * @return The state of this benchmark. [BenchmarkStates.READY] if all actions could be executed, else [BenchmarkStates.PENDING]
+     */
+    private fun checkActionCommands(benchmark: KubernetesBenchmark): BenchmarkStates {
+        return if (checkIfActionPossible(benchmark.infrastructure.resources, benchmark.sut.beforeActions)
+            && checkIfActionPossible(benchmark.infrastructure.resources, benchmark.sut.afterActions)
+            && checkIfActionPossible(benchmark.infrastructure.resources, benchmark.loadGenerator.beforeActions)
+            && checkIfActionPossible(benchmark.infrastructure.resources, benchmark.loadGenerator.afterActions)
+        ) {
+            BenchmarkStates.READY
+        } else {
+            BenchmarkStates.PENDING
+        }
+    }
+
+    /**
+     * Action commands are called on a pod. To verify that an action command can be executed,
+     * it checks that the specified pods are either currently running in the cluster or
+     * have been specified as infrastructure in the benchmark.
+     *
+     * @param resourcesSets the resource sets defined as infrastructure of the benchmark
+     * @param actions the actions to check
+     * @return true if all actions could be executed, else false
+     */
+    private fun checkIfActionPossible(resourcesSets: List<ResourceSets>, actions: List<Action>): Boolean {
+        return !actions.map {
+            checkIfResourceIsDeployed(it.selector) || checkIfResourceIsInfrastructure(resourcesSets, it.selector)
+        }.contains(false)
+    }
+
+    /**
+     * Checks for the given actionSelector whether the required resources are already deployed in the cluster or not
+     *
+     * @param selector the actionSelector to check
+     * @return true if the required resources are found, else false
+     */
+    fun checkIfResourceIsDeployed(selector: ActionSelector): Boolean {
+        val pods = this.client
+            .pods()
+            .withLabels(selector.pod.matchLabels)
+            .list()
+            .items
+
+        return if (pods.isNotEmpty() && selector.container.isNotEmpty()) {
+            pods.map { pod ->
+                pod
+                    .spec
+                    .containers
+                    .map { it.name }
+                    .contains(selector.container)
+            }.contains(true)
+        } else {
+            pods.isNotEmpty()
+        }
+    }
+
+    /**
+     * Checks for the given actionSelector whether the required resources are specified as infrastructure or not
+     *
+     * @param resourcesSets the resource sets defined as infrastructure of the benchmark
+     * @param selector the actionSelector to check
+     * @return true if the required resources are found, else false
+     */
+    fun checkIfResourceIsInfrastructure(resourcesSets: List<ResourceSets>, selector: ActionSelector): Boolean {
+        val resources = resourcesSets.flatMap { it.loadResourceSet(this.client) }
+        if (resources.isEmpty()) {
+            return false
+        }
+
+        var podExist = resources.map { it.second }
+            .filterIsInstance<Deployment>()
+            .filter { it.metadata.labels.containsMatchLabels(selector.pod.matchLabels) }
+            .any {
+                if (selector.container.isNotEmpty()) {
+                    it.spec.template.spec.containers.map { it.name }.contains(selector.container)
+                } else {
+                    true
+                }
+            }
+
+        if (podExist) {
+            return true
+        }
+
+        podExist = resources.map { it.second }
+            .filterIsInstance<StatefulSet>()
+            .filter { it.metadata.labels.containsMatchLabels(selector.pod.matchLabels) }
+            .any {
+                if (selector.container.isNotEmpty()) {
+                    it.spec.template.spec.containers.map { it.name }.contains(selector.container)
+                } else {
+                    true
+                }
+            }
+
+        if (podExist) {
+            return true
+        }
+
+        return false
+    }
+
+    /**
+     * Checks if it is possible to load all specified Kubernetes manifests.
+     *
+     * @param benchmark The benchmark to check
+     * @return The state of this benchmark. [BenchmarkStates.READY] if all resources could be loaded, else [BenchmarkStates.PENDING]
+     */
+    fun checkResources(benchmark: KubernetesBenchmark): BenchmarkStates {
+        return try {
+            val appResources =
+                benchmark.loadKubernetesResources(resourceSet = benchmark.sut.resources)
+            val loadGenResources =
+                benchmark.loadKubernetesResources(resourceSet = benchmark.loadGenerator.resources)
+            if (appResources.isNotEmpty() && loadGenResources.isNotEmpty()) {
+                BenchmarkStates.READY
+            } else {
+                BenchmarkStates.PENDING
+            }
+        } catch (e: Exception) {
+            BenchmarkStates.PENDING
+        }
+    }
+}
+
+private fun <K, V> Map<K, V>.containsMatchLabels(matchLabels: Map<K, V>): Boolean {
+    for (kv in matchLabels) {
+        if (kv.value != this[kv.key]) {
+            return false
+        }
+    }
+    return true
+}
+
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
index 70e30cf84ef40796eb085a0d68eb2e323232fde9..f066c01024fef98fc3e6e2070b0ed98235a1f8bb 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteController.kt
@@ -29,7 +29,7 @@ class TheodoliteController(
     private val executionCRDClient: MixedOperation<ExecutionCRD, BenchmarkExecutionList, Resource<ExecutionCRD>>,
     private val benchmarkCRDClient: MixedOperation<BenchmarkCRD, KubernetesBenchmarkList, Resource<BenchmarkCRD>>,
     private val executionStateHandler: ExecutionStateHandler,
-    private val benchmarkStateHandler: BenchmarkStateHandler
+    private val benchmarkStateChecker: BenchmarkStateChecker
 ) {
     lateinit var executor: TheodoliteExecutor
 
@@ -41,7 +41,7 @@ class TheodoliteController(
         sleep(5000) // wait until all states are correctly set
+        benchmarkStateChecker.start(true)
         while (true) {
             reconcile()
-            updateBenchmarkStatus()
             sleep(2000)
         }
     }
@@ -49,7 +49,6 @@ class TheodoliteController(
     private fun reconcile() {
         do {
             val execution = getNextExecution()
-            updateBenchmarkStatus()
             if (execution != null) {
                 val benchmark = getBenchmarks()
                     .map { it.spec }
@@ -110,8 +109,7 @@ class TheodoliteController(
                 type = "WARNING",
                 reason = "Execution failed",
                 message = "An error occurs while executing:  ${e.message}")
-            logger.error { "Failure while executing execution ${execution.name} with benchmark ${benchmark.name}." }
-            logger.error { "Problem is: $e" }
+            logger.error(e) { "Failure while executing execution ${execution.name} with benchmark ${benchmark.name}." }
             executionStateHandler.setExecutionState(execution.name, ExecutionStates.FAILURE)
         }
         executionStateHandler.stopDurationStateTimer()
@@ -139,7 +137,6 @@ class TheodoliteController(
             }
     }
 
-
     /**
      * Get the [BenchmarkExecution] for the next run. Which [BenchmarkExecution]
      * is selected for the next execution depends on three points:
@@ -173,34 +170,7 @@ class TheodoliteController(
             .firstOrNull()
     }
 
-    private fun updateBenchmarkStatus() {
-        this.benchmarkCRDClient
-            .list()
-            .items
-            .map { it.spec.name = it.metadata.name; it }
-            .map { Pair(it, checkResource(it.spec)) }
-            .forEach { setState(it.first, it.second ) }
-    }
-
-    private fun setState(resource: BenchmarkCRD, state: BenchmarkStates) {
-        benchmarkStateHandler.setResourceSetState(resource.spec.name, state)
-    }
 
-    private fun checkResource(benchmark: KubernetesBenchmark): BenchmarkStates {
-        return try {
-            val appResources =
-                benchmark.loadKubernetesResources(resourceSet = benchmark.sut.resources)
-            val loadGenResources =
-                benchmark.loadKubernetesResources(resourceSet = benchmark.sut.resources)
-            if(appResources.isNotEmpty() && loadGenResources.isNotEmpty()) {
-                BenchmarkStates.READY
-            } else {
-                BenchmarkStates.PENDING
-            }
-        } catch (e: Exception) {
-            BenchmarkStates.PENDING
-        }
-    }
 
     fun isExecutionRunning(executionName: String): Boolean {
         if (!::executor.isInitialized) return false
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteOperator.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteOperator.kt
index 4850a44fdddba117178e29d3170f44a95df646e7..135ffeaef1a5165482d9d6f7f8f5f3dffd596574 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteOperator.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/TheodoliteOperator.kt
@@ -34,6 +34,7 @@ class TheodoliteOperator {
     private lateinit var controller: TheodoliteController
     private lateinit var executionStateHandler: ExecutionStateHandler
     private lateinit var benchmarkStateHandler: BenchmarkStateHandler
+    private lateinit var benchmarkStateChecker: BenchmarkStateChecker
 
 
     fun start() {
@@ -71,7 +72,7 @@ class TheodoliteOperator {
             controller = getController(
                 client = client,
                 executionStateHandler = getExecutionStateHandler(client = client),
-                benchmarkStateHandler = getBenchmarkStateHandler(client = client)
+                benchmarkStateChecker = getBenchmarkStateChecker(client = client)
 
             )
             getExecutionEventHandler(controller, client).startAllRegisteredInformers()
@@ -112,17 +113,28 @@ class TheodoliteOperator {
         return benchmarkStateHandler
     }
 
+    fun getBenchmarkStateChecker(client: NamespacedKubernetesClient) : BenchmarkStateChecker {
+        if (!::benchmarkStateChecker.isInitialized) {
+            this.benchmarkStateChecker = BenchmarkStateChecker(
+                client = client,
+                benchmarkStateHandler = getBenchmarkStateHandler(client = client),
+                benchmarkCRDClient = getBenchmarkClient(client = client))
+        }
+        return benchmarkStateChecker
+    }
+
+
     fun getController(
         client: NamespacedKubernetesClient,
         executionStateHandler: ExecutionStateHandler,
-        benchmarkStateHandler: BenchmarkStateHandler
+        benchmarkStateChecker: BenchmarkStateChecker
     ): TheodoliteController {
         if (!::controller.isInitialized) {
             this.controller = TheodoliteController(
                 benchmarkCRDClient = getBenchmarkClient(client),
                 executionCRDClient = getExecutionClient(client),
                 executionStateHandler = executionStateHandler,
-                benchmarkStateHandler = benchmarkStateHandler
+                benchmarkStateChecker = benchmarkStateChecker
             )
         }
         return this.controller
diff --git a/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/GuessStrategy.kt b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/GuessStrategy.kt
new file mode 100644
index 0000000000000000000000000000000000000000..786a3baf159e94841c1f76c696f030718e8f768f
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/GuessStrategy.kt
@@ -0,0 +1,22 @@
+package theodolite.strategies.searchstrategy
+
+import io.quarkus.runtime.annotations.RegisterForReflection
+import theodolite.util.Resource
+
+/**
+ * Base class for the implementation of Guess strategies. Guess strategies are strategies to determine the resource
+ * demand we start with in our initial guess search strategy.
+ */
+
+@RegisterForReflection
+abstract class GuessStrategy {
+    /**
+     * Computing the resource demand for the initial guess search strategy to start with.
+     *
+     * @param resources List of all possible [Resource]s.
+     * @param lastLowestResource Previous resource demand needed for the given load.
+     *
+     * @return Returns the resource demand to start the initial guess search strategy with, or null if none could be determined
+     */
+    abstract fun firstGuess(resources: List<Resource>, lastLowestResource: Resource?): Resource?
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/InitialGuessSearchStrategy.kt b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/InitialGuessSearchStrategy.kt
new file mode 100644
index 0000000000000000000000000000000000000000..d97fb62cc9d37dd50122199e5d089c491784e511
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/InitialGuessSearchStrategy.kt
@@ -0,0 +1,93 @@
+package theodolite.strategies.searchstrategy
+
+import mu.KotlinLogging
+import theodolite.execution.BenchmarkExecutor
+import theodolite.util.LoadDimension
+import theodolite.util.Resource
+import theodolite.util.Results
+
+private val logger = KotlinLogging.logger {}
+
+/**
+ *  Search strategy implementation for determining the smallest suitable resource demand.
+ *  Starting with a resource amount provided by a guess strategy.
+ *
+ * @param benchmarkExecutor Benchmark executor which runs the individual benchmarks.
+ * @param guessStrategy Strategy that provides us with a guess for the first resource amount.
+ * @param results current results of all previously performed benchmarks.
+ */
+class InitialGuessSearchStrategy(benchmarkExecutor: BenchmarkExecutor, guessStrategy: GuessStrategy, results: Results) :
+        SearchStrategy(benchmarkExecutor, guessStrategy, results) {
+
+    override fun findSuitableResource(load: LoadDimension, resources: List<Resource>): Resource? {
+
+        if(resources.isEmpty()) {
+            logger.info { "You need to specify resources to be checked for the InitialGuessSearchStrategy to work." }
+            return null
+        }
+
+        if(guessStrategy == null){
+            logger.info { "Your InitialGuessSearchStrategy doesn't have a GuessStrategy. This is not supported." }
+            return null
+        }
+
+        if(results == null){
+            logger.info { "The results need to be initialized." }
+            return null
+        }
+
+
+        var lastLowestResource : Resource? = null
+
+        // Getting the lastLowestResource from results and calling firstGuess() with it
+        if (!results.isEmpty()) {
+            val maxLoad: LoadDimension? = this.results.getMaxBenchmarkedLoad(load)
+            lastLowestResource = this.results.getMinRequiredInstances(maxLoad)
+            if (lastLowestResource.get() == Int.MAX_VALUE) lastLowestResource = null
+        }
+        lastLowestResource = this.guessStrategy.firstGuess(resources, lastLowestResource)
+
+        if (lastLowestResource != null) {
+            val resourcesToCheck: List<Resource>
+            val startIndex: Int = resources.indexOf(lastLowestResource) // NOTE(review): may be -1 if the guess is not in 'resources' (depends on Resource.equals); subList(0, -1) below would throw — confirm
+
+            logger.info { "Running experiment with load '${load.get()}' and resources '${lastLowestResource.get()}'" }
+
+            // If the first experiment passes, starting downward linear search
+            // otherwise starting upward linear search
+            if (this.benchmarkExecutor.runExperiment(load, lastLowestResource)) {
+
+                resourcesToCheck = resources.subList(0, startIndex).reversed()
+                if (resourcesToCheck.isEmpty()) return lastLowestResource
+
+                var currentMin: Resource = lastLowestResource
+                for (res in resourcesToCheck) {
+
+                    logger.info { "Running experiment with load '${load.get()}' and resources '${res.get()}'" }
+                    if (this.benchmarkExecutor.runExperiment(load, res)) {
+                        currentMin = res
+                    }
+                }
+                return currentMin
+            }
+            else {
+                if (resources.size <= startIndex + 1) {
+                    logger.info{ "No more resources left to check." }
+                    return null
+                }
+                resourcesToCheck = resources.subList(startIndex + 1, resources.size)
+
+                for (res in resourcesToCheck) {
+
+                    logger.info { "Running experiment with load '${load.get()}' and resources '${res.get()}'" }
+                    if (this.benchmarkExecutor.runExperiment(load, res)) return res
+                }
+            }
+        }
+        else {
+            logger.info { "InitialGuessSearchStrategy called without lastLowestResource value, which is needed as a " +
+                    "starting point!" }
+        }
+        return null
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/PrevResourceMinGuess.kt b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/PrevResourceMinGuess.kt
new file mode 100644
index 0000000000000000000000000000000000000000..413eecea27279cd79bad155fbb7d5d18b674a12e
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/PrevResourceMinGuess.kt
@@ -0,0 +1,24 @@
+package theodolite.strategies.searchstrategy
+
+import theodolite.util.Resource
+
+/**
+ * This Guess strategy takes the minimal resource demand of the previous load, which is given as an argument for the
+ * firstGuess function.
+ */
+
+class PrevResourceMinGuess() : GuessStrategy(){
+
+    /**
+     * @param resources List of all possible [Resource]s.
+     * @param lastLowestResource Previous resource demand needed for the given load.
+     *
+     * @return the value of lastLowestResource if given; otherwise the first element of the resource list, or null
+     */
+    override fun firstGuess(resources: List<Resource>, lastLowestResource: Resource?): Resource? {
+
+        if (lastLowestResource != null) return lastLowestResource
+        else if(resources.isNotEmpty()) return resources[0]
+        else return null
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/SearchStrategy.kt b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/SearchStrategy.kt
index 4e304b010d4d56f6b5fe734a6b977361f93e57a1..97c723f2cfe459081cbb327f6860e48319c8f4f1 100644
--- a/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/SearchStrategy.kt
+++ b/theodolite/src/main/kotlin/theodolite/strategies/searchstrategy/SearchStrategy.kt
@@ -4,14 +4,18 @@ import io.quarkus.runtime.annotations.RegisterForReflection
 import theodolite.execution.BenchmarkExecutor
 import theodolite.util.LoadDimension
 import theodolite.util.Resource
+import theodolite.util.Results
 
 /**
  *  Base class for the implementation for SearchStrategies. SearchStrategies determine the smallest suitable number of instances.
  *
  * @param benchmarkExecutor Benchmark executor which runs the individual benchmarks.
+ * @param guessStrategy Guess strategy for the initial resource amount in case the InitialGuessStrategy is selected.
+ * @param results the [Results] object.
  */
 @RegisterForReflection
-abstract class SearchStrategy(val benchmarkExecutor: BenchmarkExecutor) {
+abstract class SearchStrategy(val benchmarkExecutor: BenchmarkExecutor, val guessStrategy: GuessStrategy? = null,
+                              val results: Results? = null) {
     /**
      * Find smallest suitable resource from the specified resource list for the given load.
      *
diff --git a/theodolite/src/main/kotlin/theodolite/util/ActionCommandFailedException.kt b/theodolite/src/main/kotlin/theodolite/util/ActionCommandFailedException.kt
new file mode 100644
index 0000000000000000000000000000000000000000..c1a8fc401961370d2f07bfffe43f0ae4dc441d25
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/util/ActionCommandFailedException.kt
@@ -0,0 +1,4 @@
+package theodolite.util
+
+class ActionCommandFailedException(message: String, e: Exception? = null) : DeploymentFailedException(message,e) {
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/util/Configuration.kt b/theodolite/src/main/kotlin/theodolite/util/Configuration.kt
index dac3b943e69bd7e208d318f2a788275f19db11e4..7b1232cd9ba72344cdb438f974cd6c4d17fd690d 100644
--- a/theodolite/src/main/kotlin/theodolite/util/Configuration.kt
+++ b/theodolite/src/main/kotlin/theodolite/util/Configuration.kt
@@ -13,6 +13,11 @@ class Configuration(
         val NAMESPACE = System.getenv("NAMESPACE") ?: DEFAULT_NAMESPACE
         val COMPONENT_NAME = System.getenv("COMPONENT_NAME") ?: DEFAULT_COMPONENT_NAME
         val EXECUTION_MODE = System.getenv("MODE") ?: ExecutionModes.STANDALONE.value
+
+        /**
+         * Specifies how long Theodolite should wait (in sec) before aborting the execution of an action command.
+         */
+        const val TIMEOUT_SECONDS: Long = 30L
     }
 
 }
diff --git a/theodolite/src/main/kotlin/theodolite/util/Results.kt b/theodolite/src/main/kotlin/theodolite/util/Results.kt
index 60641ea0248435de53aaaaf362da7be995b391c5..2221c2e64f6dbc1776122f20793aa8d04d621d9d 100644
--- a/theodolite/src/main/kotlin/theodolite/util/Results.kt
+++ b/theodolite/src/main/kotlin/theodolite/util/Results.kt
@@ -3,7 +3,7 @@ package theodolite.util
 import io.quarkus.runtime.annotations.RegisterForReflection
 
 /**
- * Central class that saves the state of a execution of Theodolite. For an execution, it is used to save the result of
+ * Central class that saves the state of an execution of Theodolite. For an execution, it is used to save the result of
  * individual experiments. Further, it is used by the RestrictionStrategy to
  * perform the [theodolite.strategies.restriction.RestrictionStrategy].
  */
@@ -44,16 +44,16 @@ class Results {
      * If no experiments have been marked as either successful or unsuccessful
      * yet, a Resource with the constant value Int.MIN_VALUE is returned.
      */
-    fun getMinRequiredInstances(load: LoadDimension?): Resource? {
+    fun getMinRequiredInstances(load: LoadDimension?): Resource {
         if (this.results.isEmpty()) {
             return Resource(Int.MIN_VALUE, emptyList())
         }
 
-        var minRequiredInstances: Resource? = Resource(Int.MAX_VALUE, emptyList())
+        var minRequiredInstances = Resource(Int.MAX_VALUE, emptyList())
         for (experiment in results) {
             // Get all successful experiments for requested load
             if (experiment.key.first == load && experiment.value) {
-                if (minRequiredInstances == null || experiment.key.second.get() < minRequiredInstances.get()) {
+                if (experiment.key.second.get() < minRequiredInstances.get()) {
                     // Found new smallest resources
                     minRequiredInstances = experiment.key.second
                 }
@@ -83,4 +83,13 @@ class Results {
         }
         return maxBenchmarkedLoad
     }
+
+    /**
+     * Checks whether the results are empty.
+     *
+     * @return true if [results] is empty.
+     */
+    fun isEmpty(): Boolean{
+        return results.isEmpty()
+    }
 }
diff --git a/theodolite/src/test/kotlin/theodolite/InitialGuessSearchStrategyTest.kt b/theodolite/src/test/kotlin/theodolite/InitialGuessSearchStrategyTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..1af6f548b219697009c688ace712a9f7f5620bd0
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/InitialGuessSearchStrategyTest.kt
@@ -0,0 +1,133 @@
+package theodolite
+
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.Assertions.assertEquals
+import org.junit.jupiter.api.Test
+import theodolite.benchmark.BenchmarkExecution
+import theodolite.strategies.searchstrategy.InitialGuessSearchStrategy
+import theodolite.util.LoadDimension
+import theodolite.util.Resource
+import theodolite.util.Results
+import mu.KotlinLogging
+import theodolite.strategies.searchstrategy.PrevResourceMinGuess
+
+private val logger = KotlinLogging.logger {}
+
+@QuarkusTest
+class InitialGuessSearchStrategyTest {
+
+    @Test
+    fun testInitialGuessSearch() {
+        val mockResults = arrayOf(
+            arrayOf(true, true, true, true, true, true, true),
+            arrayOf(false, false, true, true, true, true, true),
+            arrayOf(false, false, true, true, true, true, true),
+            arrayOf(false, false, false, true, true, true, true),
+            arrayOf(false, false, false, false, true, true, true),
+            arrayOf(false, false, false, false, false, false, true),
+            arrayOf(false, false, false, false, false, false, false)
+        )
+        val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) }
+        val mockResources: List<Resource> = (0..6).map { number -> Resource(number, emptyList()) }
+        val results = Results()
+        val benchmark = TestBenchmark()
+        val guessStrategy = PrevResourceMinGuess()
+        val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo()
+        val benchmarkExecutor = TestBenchmarkExecutorImpl(mockResults, benchmark, results, listOf(sloChecker), 0, 0, 5)
+        val strategy = InitialGuessSearchStrategy(benchmarkExecutor,guessStrategy, results)
+
+        val actual: ArrayList<Resource?> = ArrayList()
+        val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 3, 4, 6).map { x -> Resource(x, emptyList()) })
+        expected.add(null)
+
+        for (load in mockLoads) {
+            val returnVal : Resource? = strategy.findSuitableResource(load, mockResources)
+            if(returnVal != null) {
+                logger.info { "returnVal '${returnVal.get()}'" }
+            }
+            else {
+                logger.info { "returnVal is null." }
+            }
+            actual.add(returnVal)
+        }
+
+        assertEquals(expected, actual)
+    }
+
+    @Test
+    fun testInitialGuessSearchLowerResourceDemandHigherLoad() {
+        val mockResults = arrayOf(
+            arrayOf(true, true, true, true, true, true, true),
+            arrayOf(false, false, true, true, true, true, true),
+            arrayOf(false, false, true, true, true, true, true),
+            arrayOf(false, true, true, true, true, true, true),
+            arrayOf(false, false, false, false, true, true, true),
+            arrayOf(false, false, false, false, false, false, true),
+            arrayOf(false, false, false, false, false, false, false)
+        )
+        val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) }
+        val mockResources: List<Resource> = (0..6).map { number -> Resource(number, emptyList()) }
+        val results = Results()
+        val benchmark = TestBenchmark()
+        val guessStrategy = PrevResourceMinGuess()
+        val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo()
+        val benchmarkExecutor = TestBenchmarkExecutorImpl(mockResults, benchmark, results, listOf(sloChecker), 0, 0, 5)
+        val strategy = InitialGuessSearchStrategy(benchmarkExecutor,guessStrategy, results)
+
+        val actual: ArrayList<Resource?> = ArrayList()
+        val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 1, 4, 6).map { x -> Resource(x, emptyList()) })
+        expected.add(null)
+
+        for (load in mockLoads) {
+            val returnVal : Resource? = strategy.findSuitableResource(load, mockResources)
+            if(returnVal != null) {
+                logger.info { "returnVal '${returnVal.get()}'" }
+            }
+            else {
+                logger.info { "returnVal is null." }
+            }
+            actual.add(returnVal)
+        }
+
+        assertEquals(expected, actual)
+    }
+
+    @Test
+    fun testInitialGuessSearchFirstNotDoable() {
+        val mockResults = arrayOf(
+                arrayOf(false, false, false, false, false, false, false),
+                arrayOf(false, false, true, true, true, true, true),
+                arrayOf(false, false, false, true, true, true, true),
+                arrayOf(true, true, true, true, true, true, true),
+                arrayOf(false, false, false, false, true, true, true),
+                arrayOf(false, false, false, false, false, false, true),
+                arrayOf(false, false, false, false, false, false, false)
+        )
+        val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) }
+        val mockResources: List<Resource> = (0..6).map { number -> Resource(number, emptyList()) }
+        val results = Results()
+        val benchmark = TestBenchmark()
+        val guessStrategy = PrevResourceMinGuess()
+        val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo()
+        val benchmarkExecutor = TestBenchmarkExecutorImpl(mockResults, benchmark, results, listOf(sloChecker), 0, 0, 5)
+        val strategy = InitialGuessSearchStrategy(benchmarkExecutor, guessStrategy, results)
+
+        val actual: ArrayList<Resource?> = ArrayList()
+        var expected: ArrayList<Resource?> = ArrayList(listOf(2, 3, 0, 4, 6).map { x -> Resource(x, emptyList()) })
+        expected.add(null)
+        expected = ArrayList(listOf(null) + expected)
+
+        for (load in mockLoads) {
+            val returnVal : Resource? = strategy.findSuitableResource(load, mockResources)
+            if(returnVal != null) {
+                logger.info { "returnVal '${returnVal.get()}'" }
+            }
+            else {
+                logger.info { "returnVal is null." }
+            }
+            actual.add(returnVal)
+        }
+
+        assertEquals(expected, actual)
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt b/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..0e40fca5caf9fe721c547e09d2ba22c25860a1bf
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt
@@ -0,0 +1,128 @@
+package theodolite.benchmark
+
+import io.fabric8.kubernetes.api.model.Pod
+import io.fabric8.kubernetes.api.model.PodBuilder
+import io.fabric8.kubernetes.api.model.PodListBuilder
+import io.fabric8.kubernetes.client.server.mock.KubernetesServer
+import io.fabric8.kubernetes.client.utils.Utils
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.*
+import org.junit.jupiter.api.Assertions.assertEquals
+import theodolite.execution.operator.TheodoliteController
+import theodolite.execution.operator.TheodoliteOperator
+import theodolite.util.ActionCommandFailedException
+
+@QuarkusTest
+class ActionCommandTest {
+    private val server = KubernetesServer(false, false)
+    lateinit var controller: TheodoliteController
+
+    @BeforeEach
+    fun setUp() {
+        server.before()
+        val operator = TheodoliteOperator()
+        this.controller = operator.getController(
+            client = server.client,
+            executionStateHandler = operator.getExecutionStateHandler(client = server.client),
+            benchmarkStateChecker = operator.getBenchmarkStateChecker(client = server.client)
+        )
+
+        val pod: Pod = PodBuilder().withNewMetadata()
+            .withName("pod1")
+            .withResourceVersion("1")
+            .withLabels<String, String>(mapOf("app" to "pod"))
+            .withNamespace("test").and()
+            .build()
+
+        val ready: Pod = createReadyFrom(pod, "True")
+
+        val podList = PodListBuilder().build()
+        podList.items.add(0, ready)
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods?labelSelector=${Utils.toUrlEncoded("app=pod")}")
+            .andReturn(200, podList)
+            .always()
+
+        server
+            .expect()
+            .get()
+            .withPath("/api/v1/namespaces/test/pods/pod1")
+            .andReturn(200, ready)
+            .always()
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods/pod1/exec?command=ls&stdout=true&stderr=true")
+            .andUpgradeToWebSocket()
+            .open(ErrorChannelMessage("{\"metadata\":{},\"status\":\"Success\"}\n"))
+            .done()
+            .always()
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods/pod1/exec?command=error-command&stdout=true&stderr=true")
+            .andUpgradeToWebSocket()
+            .open(ErrorChannelMessage("{\"metadata\":{},\"status\":\"failed\", \"details\":{}}\n"))
+            .done()
+            .always()
+    }
+
+    /**
+     * Copied from fabric8 Kubernetes Client repository
+     *
+     * @param pod
+     * @param status
+     * @return
+     */
+    fun createReadyFrom(pod: Pod, status: String): Pod {
+        return PodBuilder(pod)
+            .withNewStatus()
+            .addNewCondition()
+            .withType("Ready")
+            .withStatus(status)
+            .endCondition()
+            .endStatus()
+            .build()
+    }
+
+    @AfterEach
+    fun tearDown() {
+        server.after()
+    }
+
+    @Test
+    fun testGetPodName() {
+        assertEquals("pod1", ActionCommand(client = server.client).getPodName(mutableMapOf("app" to "pod"), 1))
+    }
+
+    @Test
+    fun testActionSuccess() {
+        val action = Action()
+        action.selector = ActionSelector()
+        action.selector.pod = PodSelector()
+        action.selector.pod.matchLabels = mutableMapOf("app" to "pod")
+        action.exec = Command()
+        action.exec.command = arrayOf("ls")
+        action.exec.timeoutSeconds = 10L
+
+        action.exec(server.client)
+        assertEquals(
+            "/api/v1/namespaces/test/pods/pod1/exec?command=ls&stdout=true&stderr=true",
+            server.lastRequest.path)
+    }
+
+    @Test
+    fun testActionFailed() {
+        val action = Action()
+        action.selector = ActionSelector()
+        action.selector.pod = PodSelector()
+        action.selector.pod.matchLabels = mutableMapOf("app" to "pod")
+        action.exec = Command()
+        action.exec.command = arrayOf("error-command")
+        action.exec.timeoutSeconds = 10L
+
+        assertThrows<ActionCommandFailedException> { run { action.exec(server.client) } }
+    }
+}
diff --git a/theodolite/src/test/kotlin/theodolite/benchmark/ErrorChannelMessage.kt b/theodolite/src/test/kotlin/theodolite/benchmark/ErrorChannelMessage.kt
new file mode 100644
index 0000000000000000000000000000000000000000..df57a2529653a39ccbde14b4a91d30352224457e
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/benchmark/ErrorChannelMessage.kt
@@ -0,0 +1,17 @@
+package theodolite.benchmark
+
+import io.fabric8.mockwebserver.internal.WebSocketMessage
+import java.nio.charset.StandardCharsets
+
+class ErrorChannelMessage(body: String) : WebSocketMessage(0L, getBodyBytes(OUT_STREAM_ID, body), true, true) {
+    companion object {
+        private const val OUT_STREAM_ID: Byte = 3
+        private fun getBodyBytes(prefix: Byte, body: String): ByteArray {
+            val original = body.toByteArray(StandardCharsets.UTF_8)
+            val prefixed = ByteArray(original.size + 1)
+            prefixed[0] = prefix
+            System.arraycopy(original, 0, prefixed, 1, original.size)
+            return prefixed
+        }
+    }
+}
diff --git a/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
index e294ea539ea60104cc00e9f73de790302ad52670..b4d5950542c40aba0f39b1be772823a3de389793 100644
--- a/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
+++ b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
@@ -34,6 +34,13 @@ class BenchmarkCRDummy(name: String) {
         benchmark.sut.resources = emptyList()
         benchmark.loadGenerator.resources = emptyList()
 
+        benchmark.infrastructure.beforeActions = emptyList()
+        benchmark.infrastructure.afterActions = emptyList()
+        benchmark.sut.beforeActions = emptyList()
+        benchmark.sut.afterActions = emptyList()
+        benchmark.loadGenerator.beforeActions = emptyList()
+        benchmark.loadGenerator.afterActions = emptyList()
+
         benchmark.resourceTypes = emptyList()
         benchmark.loadTypes = emptyList()
         benchmark.kafkaConfig = kafkaConfig
diff --git a/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkStateCheckerTest.kt b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkStateCheckerTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..f3af42548d3bfc0d12e9f664d11cce1ae424e748
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkStateCheckerTest.kt
@@ -0,0 +1,177 @@
+package theodolite.execution.operator
+
+import com.google.gson.Gson
+import io.fabric8.kubernetes.api.model.ConfigMapBuilder
+import io.fabric8.kubernetes.api.model.Pod
+import io.fabric8.kubernetes.api.model.PodBuilder
+import io.fabric8.kubernetes.api.model.PodListBuilder
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder
+import io.fabric8.kubernetes.client.server.mock.KubernetesServer
+import io.fabric8.kubernetes.client.server.mock.OutputStreamMessage
+import io.fabric8.kubernetes.client.utils.Utils
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.Assertions.*
+import theodolite.benchmark.*
+import theodolite.model.crd.BenchmarkStates
+
+internal class BenchmarkStateCheckerTest {
+    private val server = KubernetesServer(false, false)
+    private val serverCrud = KubernetesServer(false, true)
+    private lateinit var checker: BenchmarkStateChecker
+    private lateinit var checkerCrud: BenchmarkStateChecker
+
+    @BeforeEach
+    fun setUp() {
+        server.before()
+        serverCrud.before()
+        val operator = TheodoliteOperator()
+        checker = BenchmarkStateChecker(
+            client = server.client,
+            benchmarkCRDClient = operator.getBenchmarkClient(server.client),
+            benchmarkStateHandler = operator.getBenchmarkStateHandler(server.client)
+        )
+
+        checkerCrud = BenchmarkStateChecker(
+            client = serverCrud.client,
+            benchmarkCRDClient = operator.getBenchmarkClient(serverCrud.client),
+            benchmarkStateHandler = operator.getBenchmarkStateHandler(serverCrud.client)
+        )
+
+        val pod: Pod = PodBuilder().withNewMetadata()
+            .withName("pod1")
+            .withResourceVersion("1")
+            .withLabels<String, String>(mapOf("app" to "pod"))
+            .withNamespace("test").and()
+            .build()
+
+        val ready: Pod = createReadyFrom(pod, "True")
+
+        val podList = PodListBuilder().build()
+        podList.items.add(0, ready)
+
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods?labelSelector=${Utils.toUrlEncoded("app=pod1")}")
+            .andReturn(200, podList)
+            .always()
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods?labelSelector=${Utils.toUrlEncoded("app=pod0")}")
+            .andReturn(200, emptyMap<String, String>())
+            .always()
+
+
+        server
+            .expect()
+            .get()
+            .withPath("/api/v1/namespaces/test/pods/pod1")
+            .andReturn(200, ready)
+            .always()
+
+        server
+            .expect()
+            .withPath("/api/v1/namespaces/test/pods/pod1/exec?command=ls&stdout=true&stderr=true")
+            .andUpgradeToWebSocket()
+            .open(OutputStreamMessage("Test-Output"))
+            .done()
+            .always()
+    }
+
+    @AfterEach
+    fun tearDown() {
+        server.after()
+        serverCrud.after()
+    }
+
+    /**
+     * Copied from fabric8 Kubernetes Client repository
+     *
+     * @param pod
+     * @param status
+     * @return
+     */
+    private fun createReadyFrom(pod: Pod, status: String): Pod {
+        return PodBuilder(pod)
+            .withNewStatus()
+            .addNewCondition()
+            .withType("Ready")
+            .withStatus(status)
+            .endCondition()
+            .endStatus()
+            .build()
+    }
+
+    private fun getActionSelector(label: Pair<String, String>): ActionSelector {
+        val podSelector = PodSelector()
+        val actionSelector = ActionSelector()
+        actionSelector.pod = podSelector
+
+        // pod with matching labels are deployed
+        podSelector.matchLabels = mutableMapOf(label)
+        return actionSelector
+    }
+
+    private fun createAndDeployConfigmapResourceSet(): ResourceSets {
+        // create test deployment
+        val resourceBuilder = DeploymentBuilder()
+        resourceBuilder.withNewSpec().endSpec()
+        resourceBuilder.withNewMetadata().endMetadata()
+        val resource = resourceBuilder.build()
+        resource.metadata.name = "test-deployment"
+        resource.metadata.labels = mutableMapOf("app" to "pod1")
+        val resourceString = Gson().toJson(resource)
+
+        // create and deploy configmap
+        val configMap1 = ConfigMapBuilder()
+            .withNewMetadata().withName("test-configmap").endMetadata()
+            .addToData("test-resource.yaml",resourceString)
+            .build()
+
+        serverCrud.client.configMaps().createOrReplace(configMap1)
+
+        // create configmap resource set
+        val resourceSet = ConfigMapResourceSet()
+        resourceSet.name = "test-configmap"
+
+        // create ResourceSetsList
+        val set = ResourceSets()
+        set.configMap = resourceSet
+        return set
+    }
+
+    @Test
+    fun checkIfResourceIsDeployed() {
+        // pod with matching labels are deployed
+        assertTrue(checker.checkIfResourceIsDeployed(getActionSelector("app" to "pod1")))
+
+        // no pod with matching labels are deployed
+        assertFalse(checker.checkIfResourceIsDeployed(getActionSelector("app" to "pod0")))
+    }
+
+    @Test
+    fun checkIfResourceIsInfrastructure() {
+        val resourceSets = listOf(createAndDeployConfigmapResourceSet())
+        assertTrue(checkerCrud.checkIfResourceIsInfrastructure(resourceSets, getActionSelector("app" to "pod1")))
+        assertFalse(checkerCrud.checkIfResourceIsInfrastructure(resourceSets, getActionSelector("app" to "pod0")))
+
+    }
+
+    @Test
+    fun checkResources() {
+        val benchmark = BenchmarkCRDummy(
+            name = "test-benchmark"
+        )
+        benchmark.getCR().spec.setClient(serverCrud.client)
+        val resourceSet = Resources()
+        resourceSet.resources = listOf(createAndDeployConfigmapResourceSet())
+        benchmark.getCR().spec.infrastructure = resourceSet
+        benchmark.getCR().spec.loadGenerator = resourceSet
+        benchmark.getCR().spec.sut = resourceSet
+
+        assertEquals(BenchmarkStates.READY,checkerCrud.checkResources(benchmark.getCR().spec))
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/execution/operator/ControllerTest.kt b/theodolite/src/test/kotlin/theodolite/execution/operator/ControllerTest.kt
index 7e0532aff36cac2fb1a1c718415315b8f54052c2..6ea69689847afeb8f9fc36de2944c6fdcf4702ad 100644
--- a/theodolite/src/test/kotlin/theodolite/execution/operator/ControllerTest.kt
+++ b/theodolite/src/test/kotlin/theodolite/execution/operator/ControllerTest.kt
@@ -32,10 +32,11 @@ class ControllerTest {
     @BeforeEach
     fun setUp() {
         server.before()
-        this.controller = TheodoliteOperator().getController(
+        val operator = TheodoliteOperator()
+        this.controller = operator.getController(
             client = server.client,
-            executionStateHandler = ExecutionStateHandler(server.client),
-            benchmarkStateHandler =  BenchmarkStateHandler(server.client)
+            executionStateHandler = operator.getExecutionStateHandler(client = server.client),
+            benchmarkStateChecker = operator.getBenchmarkStateChecker(client = server.client)
         )
 
         // benchmark
diff --git a/theodolite/src/test/kotlin/theodolite/execution/operator/ExecutionEventHandlerTest.kt b/theodolite/src/test/kotlin/theodolite/execution/operator/ExecutionEventHandlerTest.kt
index d8db7ab3b64ce3856984ddbc279ef148aa325e73..c850e84f225bab7fc0b5eb145f9e655567de43d0 100644
--- a/theodolite/src/test/kotlin/theodolite/execution/operator/ExecutionEventHandlerTest.kt
+++ b/theodolite/src/test/kotlin/theodolite/execution/operator/ExecutionEventHandlerTest.kt
@@ -36,8 +36,8 @@ class ExecutionEventHandlerTest {
         val operator = TheodoliteOperator()
         this.controller = operator.getController(
             client = server.client,
-            executionStateHandler = ExecutionStateHandler(client = server.client),
-            benchmarkStateHandler = BenchmarkStateHandler(client = server.client)
+            executionStateHandler = operator.getExecutionStateHandler(client = server.client),
+            benchmarkStateChecker = operator.getBenchmarkStateChecker(client = server.client)
         )
 
         this.factory = operator.getExecutionEventHandler(this.controller, server.client)