diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7d14e478aed45186eb71aa78ca65d8b54ade5856..af3ca77a897a2ddea9ee8d7c6e4e30fa7f369e94 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -131,7 +131,7 @@ lint-helm:
   script: helm lint helm/
   rules:
   - changes:
-    - helm/*
+    - helm/**/*
   - when: manual
     allow_failure: true
 
@@ -297,6 +297,34 @@ deploy-uc4-flink:
     JAVA_PROJECT_NAME: "uc4-flink"
     JAVA_PROJECT_DEPS: "flink-commons"
 
+deploy-uc1-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc1-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc1-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc2-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc2-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc2-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc3-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc3-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc3-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc4-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc4-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc4-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
 deploy-uc1-beam-flink:
   extends: .deploy-benchmarks
   variables:
@@ -449,6 +477,15 @@ smoketest-uc1-beam-samza:
     DOCKER_COMPOSE_DIR: "uc1-beam-samza"
     JAVA_PROJECT_DEPS: "uc1-beam-samza,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
 
+smoketest-uc1-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-hazelcastjet
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc1-hazelcastjet,hazelcastjet-commons,uc1-load-generator,load-generator-commons"
+
 smoketest-uc2-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -485,6 +522,15 @@ smoketest-uc2-beam-samza:
     DOCKER_COMPOSE_DIR: "uc2-beam-samza"
     JAVA_PROJECT_DEPS: "uc2-beam-samza,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
 
+smoketest-uc2-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-hazelcastjet
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc2-hazelcastjet,hazelcastjet-commons,uc2-load-generator,load-generator-commons"
+
 smoketest-uc3-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -512,6 +558,15 @@ smoketest-uc3-beam-samza:
     DOCKER_COMPOSE_DIR: "uc3-beam-samza"
     JAVA_PROJECT_DEPS: "uc3-beam-samza,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
 
+smoketest-uc3-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-hazelcastjet
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc3-hazelcastjet,hazelcastjet-commons,uc3-load-generator,load-generator-commons"
+
 smoketest-uc4-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -548,6 +603,14 @@ smoketest-uc4-beam-samza:
     DOCKER_COMPOSE_DIR: "uc4-beam-samza"
     JAVA_PROJECT_DEPS: "uc4-beam-samza,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"
 
+smoketest-uc4-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-hazelcastjet
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc4-hazelcastjet,hazelcastjet-commons,uc4-load-generator,load-generator-commons"
 
 # Theodolite Framework
 
diff --git a/CITATION.cff b/CITATION.cff
index b6ca4542010b83e12206fbc0d9841683b43e1d57..160146c844b1d128299617ae8d93ac4af77e4ca0 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -1,6 +1,6 @@
 cff-version: "1.1.0"
 message: "If you use Theodolite, please cite it using these metadata."
-authors: 
+authors:
   - family-names: Henning
     given-names: "Sören"
     orcid: "https://orcid.org/0000-0001-6912-2549"
@@ -8,13 +8,13 @@ authors:
     given-names: Wilhelm
     orcid: "https://orcid.org/0000-0001-6625-4335"
 title: Theodolite
-version: "0.6.4"
+version: "0.7.0"
 repository-code: "https://github.com/cau-se/theodolite"
 license: "Apache-2.0"
 doi: "10.1016/j.bdr.2021.100209"
 preferred-citation:
   type: article
-  authors: 
+  authors:
     - family-names: Henning
       given-names: "Sören"
       orcid: "https://orcid.org/0000-0001-6912-2549"
diff --git a/codemeta.json b/codemeta.json
index 948a34628ec919e2492b61e3ac9997392dc5e030..fa2eb23a956d9ad2525beee1b5ca343845c38d2d 100644
--- a/codemeta.json
+++ b/codemeta.json
@@ -5,10 +5,10 @@
     "codeRepository": "https://github.com/cau-se/theodolite",
     "dateCreated": "2020-03-13",
     "datePublished": "2020-07-27",
-    "dateModified": "2022-01-24",
+    "dateModified": "2022-05-11",
     "downloadUrl": "https://github.com/cau-se/theodolite/releases",
     "name": "Theodolite",
-    "version": "0.6.4",
+    "version": "0.7.0",
     "description": "Theodolite is a framework for benchmarking the horizontal and vertical scalability of cloud-native applications.",
     "developmentStatus": "active",
     "relatedLink": [
diff --git a/docs/Gemfile b/docs/Gemfile
index c9ce8dff9c02691c4a6a3be9e94341745378fb0b..b2bab9c1cd6c4591d474c8545e459276692ab487 100644
--- a/docs/Gemfile
+++ b/docs/Gemfile
@@ -9,11 +9,11 @@ source "https://rubygems.org"
 # Happy Jekylling!
 #gem "jekyll", "~> 4.2.0"
 # This is the default theme for new Jekyll sites. You may change this to anything you like.
-gem "minima", "~> 2.5"
+#gem "minima", "~> 2.5"
 # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
 # uncomment the line below. To upgrade, run `bundle update github-pages`.
-# gem "github-pages", group: :jekyll_plugins
-gem "github-pages", "~> 215", group: :jekyll_plugins
+gem "github-pages", group: :jekyll_plugins
+#gem "github-pages", "~> 226", group: :jekyll_plugins
 # If you have any plugins, put them here!
 #group :jekyll_plugins do
   #gem "jekyll-feed", "~> 0.12"
diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock
index 2159f9d34e79005f1ae5292b5f828074b38f096e..4acb2ba79d5cde699cf9dd4d379bf17c3c93e068 100644
--- a/docs/Gemfile.lock
+++ b/docs/Gemfile.lock
@@ -1,7 +1,7 @@
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (6.0.4)
+    activesupport (6.0.4.8)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 0.7, < 2)
       minitest (~> 5.1)
@@ -14,44 +14,54 @@ GEM
       execjs
     coffee-script-source (1.11.1)
     colorator (1.1.0)
-    commonmarker (0.17.13)
-      ruby-enum (~> 0.5)
-    concurrent-ruby (1.1.9)
-    dnsruby (1.61.7)
+    commonmarker (0.23.4)
+    concurrent-ruby (1.1.10)
+    dnsruby (1.61.9)
       simpleidn (~> 0.1)
-    em-websocket (0.5.2)
+    em-websocket (0.5.3)
       eventmachine (>= 0.12.9)
-      http_parser.rb (~> 0.6.0)
-    ethon (0.14.0)
+      http_parser.rb (~> 0)
+    ethon (0.15.0)
       ffi (>= 1.15.0)
     eventmachine (1.2.7)
     execjs (2.8.1)
-    faraday (1.4.3)
+    faraday (1.10.0)
       faraday-em_http (~> 1.0)
       faraday-em_synchrony (~> 1.0)
       faraday-excon (~> 1.1)
+      faraday-httpclient (~> 1.0)
+      faraday-multipart (~> 1.0)
       faraday-net_http (~> 1.0)
-      faraday-net_http_persistent (~> 1.1)
-      multipart-post (>= 1.2, < 3)
+      faraday-net_http_persistent (~> 1.0)
+      faraday-patron (~> 1.0)
+      faraday-rack (~> 1.0)
+      faraday-retry (~> 1.0)
       ruby2_keywords (>= 0.0.4)
     faraday-em_http (1.0.0)
     faraday-em_synchrony (1.0.0)
     faraday-excon (1.1.0)
+    faraday-httpclient (1.0.1)
+    faraday-multipart (1.0.3)
+      multipart-post (>= 1.2, < 3)
     faraday-net_http (1.0.1)
-    faraday-net_http_persistent (1.1.0)
-    ffi (1.15.3)
+    faraday-net_http_persistent (1.2.0)
+    faraday-patron (1.0.0)
+    faraday-rack (1.0.0)
+    faraday-retry (1.0.3)
+    ffi (1.15.5)
     forwardable-extended (2.6.0)
     gemoji (3.0.1)
-    github-pages (215)
-      github-pages-health-check (= 1.17.2)
-      jekyll (= 3.9.0)
+    github-pages (226)
+      github-pages-health-check (= 1.17.9)
+      jekyll (= 3.9.2)
       jekyll-avatar (= 0.7.0)
       jekyll-coffeescript (= 1.1.1)
-      jekyll-commonmark-ghpages (= 0.1.6)
+      jekyll-commonmark-ghpages (= 0.2.0)
       jekyll-default-layout (= 0.1.4)
       jekyll-feed (= 0.15.1)
       jekyll-gist (= 1.5.0)
       jekyll-github-metadata (= 2.13.0)
+      jekyll-include-cache (= 0.2.1)
       jekyll-mentions (= 1.6.0)
       jekyll-optional-front-matter (= 0.3.2)
       jekyll-paginate (= 1.1.0)
@@ -60,53 +70,53 @@ GEM
       jekyll-relative-links (= 0.6.1)
       jekyll-remote-theme (= 0.4.3)
       jekyll-sass-converter (= 1.5.2)
-      jekyll-seo-tag (= 2.7.1)
+      jekyll-seo-tag (= 2.8.0)
       jekyll-sitemap (= 1.4.0)
       jekyll-swiss (= 1.0.0)
-      jekyll-theme-architect (= 0.1.1)
-      jekyll-theme-cayman (= 0.1.1)
-      jekyll-theme-dinky (= 0.1.1)
-      jekyll-theme-hacker (= 0.1.2)
-      jekyll-theme-leap-day (= 0.1.1)
-      jekyll-theme-merlot (= 0.1.1)
-      jekyll-theme-midnight (= 0.1.1)
-      jekyll-theme-minimal (= 0.1.1)
-      jekyll-theme-modernist (= 0.1.1)
-      jekyll-theme-primer (= 0.5.4)
-      jekyll-theme-slate (= 0.1.1)
-      jekyll-theme-tactile (= 0.1.1)
-      jekyll-theme-time-machine (= 0.1.1)
+      jekyll-theme-architect (= 0.2.0)
+      jekyll-theme-cayman (= 0.2.0)
+      jekyll-theme-dinky (= 0.2.0)
+      jekyll-theme-hacker (= 0.2.0)
+      jekyll-theme-leap-day (= 0.2.0)
+      jekyll-theme-merlot (= 0.2.0)
+      jekyll-theme-midnight (= 0.2.0)
+      jekyll-theme-minimal (= 0.2.0)
+      jekyll-theme-modernist (= 0.2.0)
+      jekyll-theme-primer (= 0.6.0)
+      jekyll-theme-slate (= 0.2.0)
+      jekyll-theme-tactile (= 0.2.0)
+      jekyll-theme-time-machine (= 0.2.0)
       jekyll-titles-from-headings (= 0.5.3)
       jemoji (= 0.12.0)
-      kramdown (= 2.3.1)
+      kramdown (= 2.3.2)
       kramdown-parser-gfm (= 1.1.0)
       liquid (= 4.0.3)
       mercenary (~> 0.3)
       minima (= 2.5.1)
-      nokogiri (>= 1.10.4, < 2.0)
+      nokogiri (>= 1.13.4, < 2.0)
       rouge (= 3.26.0)
       terminal-table (~> 1.4)
-    github-pages-health-check (1.17.2)
+    github-pages-health-check (1.17.9)
       addressable (~> 2.3)
       dnsruby (~> 1.60)
       octokit (~> 4.0)
-      public_suffix (>= 2.0.2, < 5.0)
+      public_suffix (>= 3.0, < 5.0)
       typhoeus (~> 1.3)
-    html-pipeline (2.14.0)
+    html-pipeline (2.14.1)
       activesupport (>= 2)
       nokogiri (>= 1.4)
-    html-proofer (3.19.2)
+    html-proofer (3.19.3)
       addressable (~> 2.3)
       mercenary (~> 0.3)
-      nokogumbo (~> 2.0)
+      nokogiri (~> 1.12)
       parallel (~> 1.3)
       rainbow (~> 3.0)
       typhoeus (~> 1.3)
       yell (~> 2.0)
-    http_parser.rb (0.6.0)
+    http_parser.rb (0.8.0)
     i18n (0.9.5)
       concurrent-ruby (~> 1.0)
-    jekyll (3.9.0)
+    jekyll (3.9.2)
       addressable (~> 2.4)
       colorator (~> 1.0)
       em-websocket (~> 0.5)
@@ -124,12 +134,12 @@ GEM
     jekyll-coffeescript (1.1.1)
       coffee-script (~> 2.2)
       coffee-script-source (~> 1.11.1)
-    jekyll-commonmark (1.3.1)
-      commonmarker (~> 0.14)
-      jekyll (>= 3.7, < 5.0)
-    jekyll-commonmark-ghpages (0.1.6)
-      commonmarker (~> 0.17.6)
-      jekyll-commonmark (~> 1.2)
+    jekyll-commonmark (1.4.0)
+      commonmarker (~> 0.22)
+    jekyll-commonmark-ghpages (0.2.0)
+      commonmarker (~> 0.23.4)
+      jekyll (~> 3.9.0)
+      jekyll-commonmark (~> 1.4.0)
       rouge (>= 2.0, < 4.0)
     jekyll-default-layout (0.1.4)
       jekyll (~> 3.0)
@@ -140,6 +150,8 @@ GEM
     jekyll-github-metadata (2.13.0)
       jekyll (>= 3.4, < 5.0)
       octokit (~> 4.0, != 4.4.0)
+    jekyll-include-cache (0.2.1)
+      jekyll (>= 3.7, < 5.0)
     jekyll-mentions (1.6.0)
       html-pipeline (~> 2.3)
       jekyll (>= 3.7, < 5.0)
@@ -159,50 +171,50 @@ GEM
       rubyzip (>= 1.3.0, < 3.0)
     jekyll-sass-converter (1.5.2)
       sass (~> 3.4)
-    jekyll-seo-tag (2.7.1)
+    jekyll-seo-tag (2.8.0)
       jekyll (>= 3.8, < 5.0)
     jekyll-sitemap (1.4.0)
       jekyll (>= 3.7, < 5.0)
     jekyll-swiss (1.0.0)
-    jekyll-theme-architect (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-architect (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-cayman (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-cayman (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-dinky (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-dinky (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-hacker (0.1.2)
+    jekyll-theme-hacker (0.2.0)
       jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-leap-day (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-leap-day (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-merlot (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-merlot (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-midnight (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-midnight (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-minimal (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-minimal (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-modernist (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-modernist (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-primer (0.5.4)
+    jekyll-theme-primer (0.6.0)
       jekyll (> 3.5, < 5.0)
       jekyll-github-metadata (~> 2.9)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-slate (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-slate (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-tactile (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-tactile (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-time-machine (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-time-machine (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
     jekyll-titles-from-headings (0.5.3)
       jekyll (>= 3.3, < 5.0)
@@ -212,12 +224,12 @@ GEM
       gemoji (~> 3.0)
       html-pipeline (~> 2.2)
       jekyll (>= 3.0, < 5.0)
-    kramdown (2.3.1)
+    kramdown (2.3.2)
       rexml
     kramdown-parser-gfm (1.1.0)
       kramdown (~> 2.0)
     liquid (4.0.3)
-    listen (3.5.1)
+    listen (3.7.1)
       rb-fsevent (~> 0.10, >= 0.10.3)
       rb-inotify (~> 0.9, >= 0.9.10)
     mercenary (0.3.6)
@@ -225,30 +237,26 @@ GEM
       jekyll (>= 3.5, < 5.0)
       jekyll-feed (~> 0.9)
       jekyll-seo-tag (~> 2.1)
-    minitest (5.14.4)
+    minitest (5.15.0)
     multipart-post (2.1.1)
-    nokogiri (1.13.3-x86_64-linux)
+    nokogiri (1.13.6-x86_64-linux)
       racc (~> 1.4)
-    nokogumbo (2.0.5)
-      nokogiri (~> 1.8, >= 1.8.4)
-    octokit (4.21.0)
+    octokit (4.22.0)
       faraday (>= 0.9)
       sawyer (~> 0.8.0, >= 0.5.3)
-    parallel (1.21.0)
+    parallel (1.22.1)
     pathutil (0.16.2)
       forwardable-extended (~> 2.6)
-    public_suffix (4.0.6)
+    public_suffix (4.0.7)
     racc (1.6.0)
-    rainbow (3.0.0)
-    rb-fsevent (0.11.0)
+    rainbow (3.1.1)
+    rb-fsevent (0.11.1)
     rb-inotify (0.10.1)
       ffi (~> 1.0)
     rexml (3.2.5)
     rouge (3.26.0)
-    ruby-enum (0.9.0)
-      i18n
-    ruby2_keywords (0.0.4)
-    rubyzip (2.3.0)
+    ruby2_keywords (0.0.5)
+    rubyzip (2.3.2)
     safe_yaml (1.0.5)
     sass (3.7.4)
       sass-listen (~> 4.0.0)
@@ -269,19 +277,17 @@ GEM
       thread_safe (~> 0.1)
     unf (0.1.4)
       unf_ext
-    unf_ext (0.0.7.7)
-    unicode-display_width (1.7.0)
+    unf_ext (0.0.8.1)
+    unicode-display_width (1.8.0)
     yell (2.2.2)
-    zeitwerk (2.4.2)
+    zeitwerk (2.5.4)
 
 PLATFORMS
   x86_64-linux
-  x86_64-linux-musl
 
 DEPENDENCIES
-  github-pages (~> 215)
+  github-pages
   html-proofer
-  minima (~> 2.5)
   tzinfo (~> 1.2)
   tzinfo-data
   wdm (~> 0.1.1)
diff --git a/docs/api-reference/crds.md b/docs/api-reference/crds.md
index 782252bca9960395a17501bf9a31d6a2e60be25f..996ea1a3a4486ca27031f3d29ad64e018f66f5fb 100644
--- a/docs/api-reference/crds.md
+++ b/docs/api-reference/crds.md
@@ -146,6 +146,15 @@ Resource Types:
             <i>Default</i>: <br/>
         </td>
         <td>false</td>
+      </tr><tr>
+        <td><b>waitForResourcesEnabled</b></td>
+        <td>boolean</td>
+        <td>
+          If true, Theodolite waits to create the SUT resources until the infrastructure resources are ready and, analogously, waits to create the load generator resources until the SUT resources are ready.<br/>
+          <br/>
+            <i>Default</i>: false<br/>
+        </td>
+        <td>false</td>
       </tr></tbody>
 </table>
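
For illustration, the new flag would be set in a Benchmark manifest roughly as follows (hedged sketch: the nesting under `spec` is an assumption derived from the table above, and the remaining fields mirror the benchmark definitions added in this change):

```yaml
apiVersion: theodolite.com/v1
kind: benchmark
metadata:
  name: example-benchmark
spec:
  waitForResourcesEnabled: true  # defaults to false
  infrastructure:
    resources: []  # e.g., middleware manifests; created first
  sut:
    resources: []  # created only once the infrastructure resources are ready
  loadGenerator:
    resources: []  # created only once the SUT resources are ready
```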
 
diff --git a/docs/creating-a-benchmark.md b/docs/creating-a-benchmark.md
index b09c989e59d910fc352af9d1c5690b224e3346e6..b4602d6f7fd209b5851a9a9db128dfce6369b2a4 100644
--- a/docs/creating-a-benchmark.md
+++ b/docs/creating-a-benchmark.md
@@ -91,10 +91,37 @@ filesystem:
   - example-service.yaml
 ```
 
-<!-- ### Before and after actions -->
+### Actions
 
+Sometimes it is not sufficient to only define resources that are created and deleted when running a benchmark. Instead, it might be necessary to define actions that are executed before the benchmark is started or after it is stopped.
 
+Theodolite allows executing commands on running pods. This is similar to `kubectl exec` or Kubernetes' [container lifecycle handlers](https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/). Theodolite actions can run before (`beforeActions`) or after (`afterActions`) all `sut`, `loadGenerator`, or `infrastructure` resources are deployed.
+For example, the following actions create a file in a pod with the label `app: logger` before the SUT is started and delete it after the SUT is stopped:
 
+```yaml
+  sut:
+    resources: # ...
+    beforeActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: logger
+        exec:
+          command: ["touch", "file-used-by-logger.txt"]
+          timeoutSeconds: 90
+    afterActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: logger
+        exec:
+          command: [ "rm", "file-used-by-logger.txt" ]
+          timeoutSeconds: 90
+```
+
+Theodolite checks if all referenced pods are available for the specified actions. That means these pods must either be defined in `infrastructure` or already deployed in the cluster. If not all referenced pods are available, the benchmark will not be set as `Ready`. Consequently, an action cannot be executed on a pod that is defined as an SUT or load generator resource.
+
+*Note: Actions should be used sparingly. While it is possible to define entire benchmarks imperatively as actions, it is considered better practice to define as much as possible using declarative, native Kubernetes resource files.*
 
 <!--
 A Benchmark refers to other Kubernetes resources (e.g., Deployments, Services, ConfigMaps), which describe the system under test, the load generator and infrastructure components such as a middleware used in the benchmark. To manage those resources, Theodolite needs to have access to them. This is done by bundling resources in ConfigMaps.
@@ -116,7 +143,7 @@ If a benchmark is [executed by an Execution](running-benchmarks), these patchers
 ## Kafka Configuration
 
 Theodolite allows to automatically create and remove Kafka topics for each SLO experiment by setting a `kafkaConfig`.
-It `bootstrapServer` needs to point your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
+`bootstrapServer` needs to point to your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
 For each topic, you configure its name, the number of partitions and the replication factor.
 
 With the `removeOnly: True` property, you can also instruct Theodolite to only remove topics and not create them.
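
A hedged example of such a `kafkaConfig`, mirroring the benchmark definitions added elsewhere in this change (the bootstrap server address and topic names are taken from those definitions and are illustrative):

```yaml
kafkaConfig:
  bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
  topics:
    - name: "input"
      numPartitions: 40
      replicationFactor: 1
    - name: "theodolite-.*"  # matching topics are only removed, never created
      removeOnly: True
```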
diff --git a/docs/drafts/actions.md b/docs/drafts/actions.md
deleted file mode 100644
index 8092fddb088b3fe8fc64f51bff03bb0c6504b74f..0000000000000000000000000000000000000000
--- a/docs/drafts/actions.md
+++ /dev/null
@@ -1,62 +0,0 @@
-## Infrastructure
-The necessary infrastructure for an execution can be defined in the benchmark manifests. The related resources are create *before* an execution is started, and removed *after* an execution is finished.
-
-### Example
-
-```yaml
-  infrastructure:
-    resources:
-      - configMap:
-          name: "example-configmap"
-          files:
-            - "uc1-kstreams-deployment.yaml"
-```
-
-## Action Commands
-Theodolite allows to execute commands on running pods (similar to the `kubectl exec -it <pod-name> -- <command>` command). This commands can be run either before (via so called `beforeActions`) or after (via so called `afterActions`) an experiment is executed.
-
-Theodolite checks if all required pods are available for the specified actions (i.e. the pods must either be defined as infrastructure or already deployed in the cluster). If not all pods/resources are available, the benchmark will not be set as `Ready`. Consequently, an action cannot be executed on a pod that is defined as an SUT or loadGen resource.
-
-### Example
-
-```yaml
-# For the system under test
-  sut:
-    resources: ...
-    beforeActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: ["touch", "test-file-sut"]
-          timeoutSeconds: 90
-    afterActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: [ "touch", "test-file-sut-after" ]
-          timeoutSeconds: 90
-
-# analog, for the load generator
-  loadGenerator:
-    resources: ... 
-    beforeActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: ["touch", "test-file-loadGen"]
-          timeoutSeconds: 90
-    afterActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: [ "touch", "test-file-loadGen-after" ]
-          timeoutSeconds: 90
-```
\ No newline at end of file
diff --git a/docs/index.yaml b/docs/index.yaml
index 3e0de103a78f3529d314727ed59be3dcdc333fc9..956bb83b19ebbae4cddc4da6f07a0d937cf3dc2d 100644
--- a/docs/index.yaml
+++ b/docs/index.yaml
@@ -1,6 +1,41 @@
 apiVersion: v1
 entries:
   theodolite:
+  - apiVersion: v2
+    appVersion: 0.7.0
+    created: "2022-05-11T13:49:02.491041789+02:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: strimzi.enabled
+      name: strimzi-kafka-operator
+      repository: https://strimzi.io/charts/
+      version: 0.28.0
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: af10134baa30bb07423f78240fe1c609381e1c616585883cf5d3aded2d86a2b1
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.7.0/theodolite-0.7.0.tgz
+    version: 0.7.0
   - apiVersion: v2
     appVersion: 0.6.4
     created: "2022-02-16T16:09:11.967649304+01:00"
@@ -316,4 +351,4 @@ entries:
     urls:
     - https://github.com/cau-se/theodolite/releases/download/v0.4.0/theodolite-0.4.0.tgz
     version: 0.4.0
-generated: "2022-02-16T16:09:11.93111234+01:00"
+generated: "2022-05-11T13:49:02.423487026+02:00"
diff --git a/helm/Chart.yaml b/helm/Chart.yaml
index 973c985b5bdaa4d53390954017ed9176bb396f55..5d707a70599b8a26ce828c94223ac20903f71848 100644
--- a/helm/Chart.yaml
+++ b/helm/Chart.yaml
@@ -30,6 +30,6 @@ dependencies:
     condition: strimzi.enabled
 
 
-version: 0.7.0-SNAPSHOT
+version: 0.8.0-SNAPSHOT
 
-appVersion: 0.7.0-SNAPSHOT
+appVersion: 0.8.0-SNAPSHOT
diff --git a/helm/templates/kafka/kafka-cluster.yaml b/helm/templates/kafka/kafka-cluster.yaml
index 29cf038f12aa6ee38b21697b8d79b5aea378c7d8..f1a58077a78865c624706531b58c3150feeb83ae 100644
--- a/helm/templates/kafka/kafka-cluster.yaml
+++ b/helm/templates/kafka/kafka-cluster.yaml
@@ -30,6 +30,20 @@ spec:
         configMapKeyRef:
           name: {{ template "theodolite.fullname" . }}-kafka-metrics
           key: kafka-metrics-config.yml
+    {{- with .Values.strimzi.kafka.nodeSelectorTerms}}
+    template:
+      pod:
+        affinity:
+          nodeAffinity:
+            requiredDuringSchedulingIgnoredDuringExecution:
+              nodeSelectorTerms:
+                {{- toYaml . | nindent 16 }}
+    {{- end}}
+    {{- with .Values.strimzi.kafka.resources}}
+    resources:
+      {{- toYaml . | nindent 6 }}
+    {{- end}}
+
 
   zookeeper:
     {{- with .Values.strimzi.zookeeper.replicas }}
@@ -37,7 +51,16 @@ spec:
       {{- toYaml . | nindent 6 }}
     {{- end }}
     storage:
-      type: ephemeral 
+      type: ephemeral
+    {{- with .Values.strimzi.zookeeper.nodeSelectorTerms}}
+    template:
+      pod:
+        affinity:
+          nodeAffinity:
+            requiredDuringSchedulingIgnoredDuringExecution:
+              nodeSelectorTerms:
+                {{- toYaml . | nindent 16 }}
+    {{- end}}
 
   kafkaExporter: {}
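
For reference, a hedged `values.yaml` sketch that the new template blocks above would consume; the keys correspond to `.Values.strimzi.kafka.nodeSelectorTerms`, `.Values.strimzi.kafka.resources`, and `.Values.strimzi.zookeeper.nodeSelectorTerms`, while the node label and resource figures are assumptions:

```yaml
strimzi:
  kafka:
    nodeSelectorTerms:  # rendered into the broker pods' required node affinity
      - matchExpressions:
          - key: node-role/kafka  # label key and value are assumptions
            operator: In
            values: ["true"]
    resources:  # passed through as the Kafka brokers' resource requests/limits
      limits:
        memory: 8Gi
        cpu: "4"
  zookeeper:
    nodeSelectorTerms: []  # same mechanism, applied to the ZooKeeper pods
```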
 
diff --git a/helm/templates/prometheus/operator-role-binding.yaml b/helm/templates/prometheus/operator-role-binding.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0fadc048940ca5254e7ce2867a657361f32ef3b7
--- /dev/null
+++ b/helm/templates/prometheus/operator-role-binding.yaml
@@ -0,0 +1,16 @@
+{{- if not (index .Values "kube-prometheus-stack" "global" "rbac" "create") -}}
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  labels:
+    app: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: Role
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+subjects:
+- kind: ServiceAccount
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  namespace: {{ .Release.Namespace }}
+{{- end }}
diff --git a/helm/templates/prometheus/operator-role.yaml b/helm/templates/prometheus/operator-role.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..16481a2ec6a4387e589079433490ac5d437dc04b
--- /dev/null
+++ b/helm/templates/prometheus/operator-role.yaml
@@ -0,0 +1,79 @@
+{{- if not (index .Values "kube-prometheus-stack" "global" "rbac" "create") -}}
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  labels:
+    app: {{ template "theodolite.name" . }}-kube-prometheus-operator
+rules:
+- apiGroups:
+  - monitoring.coreos.com
+  resources:
+  - alertmanagers
+  - alertmanagers/finalizers
+  - alertmanagerconfigs
+  - prometheuses
+  - prometheuses/finalizers
+  - thanosrulers
+  - thanosrulers/finalizers
+  - servicemonitors
+  - podmonitors
+  - probes
+  - prometheusrules
+  verbs:
+  - '*'
+- apiGroups:
+  - apps
+  resources:
+  - statefulsets
+  verbs:
+  - '*'
+- apiGroups:
+  - ""
+  resources:
+  - configmaps
+  - secrets
+  verbs:
+  - '*'
+- apiGroups:
+  - ""
+  resources:
+  - pods
+  verbs:
+  - list
+  - delete
+- apiGroups:
+  - ""
+  resources:
+  - services
+  - services/finalizers
+  - endpoints
+  verbs:
+  - get
+  - create
+  - update
+  - delete
+- apiGroups:
+  - ""
+  resources:
+  - nodes
+  verbs:
+  - list
+  - watch
+- apiGroups:
+  - ""
+  resources:
+  - namespaces
+  verbs:
+  - get
+  - list
+  - watch
+- apiGroups:
+  - networking.k8s.io
+  resources:
+  - ingresses
+  verbs:
+  - get
+  - list
+  - watch
+{{- end }}
diff --git a/helm/templates/prometheus/cluster-role-binding.yaml b/helm/templates/prometheus/role-binding.yaml
similarity index 75%
rename from helm/templates/prometheus/cluster-role-binding.yaml
rename to helm/templates/prometheus/role-binding.yaml
index f2f167b94b79ad4db130565777cb8af486762c8c..722f806e0621a5775083f74f064e0c9eae18f1d8 100644
--- a/helm/templates/prometheus/cluster-role-binding.yaml
+++ b/helm/templates/prometheus/role-binding.yaml
@@ -1,14 +1,14 @@
-{{- if .Values.prometheus.clusterRoleBinding.enabled -}}
+{{- if .Values.prometheus.roleBinding.enabled -}}
 apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRoleBinding
+kind: RoleBinding
 metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 roleRef:
   apiGroup: rbac.authorization.k8s.io
-  kind: ClusterRole
+  kind: Role
   name: {{ template "theodolite.fullname" . }}-prometheus
 subjects:
 - kind: ServiceAccount
   name: {{ template "theodolite.fullname" . }}-prometheus
   namespace: {{ .Release.Namespace }}
-{{- end}}
\ No newline at end of file
+{{- end}}
diff --git a/helm/templates/prometheus/cluster-role.yaml b/helm/templates/prometheus/role.yaml
similarity index 67%
rename from helm/templates/prometheus/cluster-role.yaml
rename to helm/templates/prometheus/role.yaml
index c2fea2205451e01474d1ab7ef1ca342a9d975dc9..321d2825b5b98d31fc34619e88994058bac74cab 100644
--- a/helm/templates/prometheus/cluster-role.yaml
+++ b/helm/templates/prometheus/role.yaml
@@ -1,12 +1,11 @@
-{{- if .Values.prometheus.clusterRole.enabled -}}
+{{- if .Values.prometheus.role.enabled -}}
 apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
+kind: Role
 metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 rules:
 - apiGroups: [""]
   resources:
-  - nodes
   - services
   - endpoints
   - pods
@@ -15,6 +14,4 @@ rules:
   resources:
   - configmaps
   verbs: ["get"]
-- nonResourceURLs: ["/metrics"]
-  verbs: ["get"]
-{{- end }}
\ No newline at end of file
+{{- end}}
diff --git a/helm/templates/theodolite/role.yaml b/helm/templates/theodolite/role.yaml
index 8b3961a33bd90f81af29b5adde9da449c6a462d8..b8d4d2d005d5a969c2c72cdca145f829d748e419 100644
--- a/helm/templates/theodolite/role.yaml
+++ b/helm/templates/theodolite/role.yaml
@@ -55,6 +55,9 @@ rules:
     - get
     - create
     - update
+  {{- with .Values.rbac.additionalRules }}
+{{ toYaml . | indent 2 }}
+  {{- end }}
   {{- if .Values.operator.enabled }}
   - apiGroups:
     - theodolite.com
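
A hedged example of the new `rbac.additionalRules` value that this block appends to the generated Role (the concrete rule is illustrative):

```yaml
rbac:
  create: true
  additionalRules:
    - apiGroups: [""]
      resources: ["persistentvolumeclaims"]
      verbs: ["get", "list", "delete"]
```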
diff --git a/helm/values.yaml b/helm/values.yaml
index 188332ef148e3e0e5a8b995fde3c8921581f718b..34a32ce60927a751f645b7f8ff4af46793865797 100644
--- a/helm/values.yaml
+++ b/helm/values.yaml
@@ -5,7 +5,7 @@
 kafkaClient:
   enabled: false
   nodeSelector: {}
-  
+
 
 ####
 ## configuration of sub charts
@@ -52,6 +52,9 @@ grafana:
   service:
     nodePort: 31199
     type: NodePort
+  rbac:
+    pspEnabled: false
+    namespaced: true
 
 
 ###
@@ -155,6 +158,9 @@ cp-helm-charts:
   ## The interval between refreshing metrics
   pollIntervalSeconds: 15
 
+strimzi-kafka-operator:
+  createGlobalResources: true
+
 strimzi:
   enabled: true
   kafka:
@@ -173,6 +179,9 @@ strimzi:
     jvmOptions:
       "-Xmx": "512M"
       "-Xms": "512M"
+    nodeSelectorTerms: []
+    resources: {}
+
   zookeeper:
     replicas: 3
     zooEntrance:
@@ -180,6 +189,8 @@ strimzi:
       zookeeperClient:
         enabled: true
         nodeSelector: {}
+    nodeSelectorTerms: []
+
   topicOperator:
     enabled: true
 
@@ -188,6 +199,10 @@ strimzi:
 # Prometheus Monitoring Stack (Prometheus Operator)
 ###
 kube-prometheus-stack:
+  global:
+    rbac:
+      create: false
+
   commonLabels:
     appScope: titan-ccp
   
@@ -233,7 +248,14 @@ kube-prometheus-stack:
       releaseNamespace: true
       additional: []
     nodeSelector: {}
+    admissionWebhooks:
+      enabled: false
+    tls:
+      enabled: false
+    serviceAccount:
+      create: true
   
+  # We use our own Prometheus
   prometheus:
     enabled: false
 
@@ -245,12 +267,11 @@ prometheus:
   enabled: true
   nodeSelector: {}
   
-  # depends on your cluster security and permission settings, you may need to create the following resources
   serviceAccount:
     enabled: true
-  clusterRole:
+  role:
     enabled: true
-  clusterRoleBinding:
+  roleBinding:
     enabled: true
 
 ###
@@ -341,9 +362,10 @@ serviceAccount:
 
 rbac:
   create: true
+  additionalRules: []
 
 randomScheduler:
-  enabled: true
+  enabled: false
   image: ghcr.io/cau-se/theodolite-random-scheduler
   imageTag: latest
   imagePullPolicy: Always
diff --git a/slo-checker/record-lag/app/main.py b/slo-checker/record-lag/app/main.py
index 2e38354d45df57087a94e57d5c9ca412ed5534d3..bb68580a638a40bc7ae975594b859d10784adc67 100644
--- a/slo-checker/record-lag/app/main.py
+++ b/slo-checker/record-lag/app/main.py
@@ -24,7 +24,7 @@ elif os.getenv('LOG_LEVEL') == 'DEBUG':
 def calculate_slope_trend(results, warmup):
     d = []
     for result in results:
-        group = result['metric']['consumergroup']
+        group = result['metric'].get('consumergroup', "default")
         for value in result['values']:
             d.append({'group': group, 'timestamp': int(
                 value[0]), 'value': int(value[1]) if value[1] != 'NaN' else 0})
diff --git a/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/BeamService.java b/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/BeamService.java
index a4a8f69d74f32697d8e43d58bc5765631fea63de..0165fa644e1853353e73caeaf0b9d2df0f8e9aea 100644
--- a/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/BeamService.java
+++ b/theodolite-benchmarks/beam-commons/src/main/java/rocks/theodolite/benchmarks/commons/beam/BeamService.java
@@ -1,7 +1,9 @@
 package rocks.theodolite.benchmarks.commons.beam;
 
+import java.io.IOException;
 import java.util.function.Function;
 import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.PipelineRunner;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
@@ -23,6 +25,7 @@ public class BeamService {
 
   private final AbstractPipelineFactory pipelineFactory;
   private final PipelineOptions pipelineOptions;
+  private PipelineResult pipelineResult;
 
   /**
    * Create a new {@link BeamService}.
@@ -43,14 +46,43 @@ public class BeamService {
   }
 
   /**
-   * Start this microservice, by running the underlying Beam pipeline.
+   * Start this microservice by running the underlying Beam pipeline.
    */
   public void run() {
     LOGGER.info("Constructing Beam pipeline with pipeline options: {}",
         this.pipelineOptions.toString());
     final Pipeline pipeline = this.pipelineFactory.create(this.pipelineOptions);
     LOGGER.info("Starting BeamService {}.", this.applicationName);
-    pipeline.run().waitUntilFinish();
+    this.pipelineResult = pipeline.run();
+  }
+
+  /**
+   * Start this microservice by running the underlying Beam pipeline and block until this process is
+   * terminated.
+   */
+  public void runStandalone() {
+    this.run();
+    Runtime.getRuntime().addShutdownHook(new Thread(() -> this.stop()));
+    this.pipelineResult.waitUntilFinish();
+  }
+
+  /**
+   * Stop this microservice by canceling the underlying Beam pipeline.
+   */
+  public void stop() {
+    LOGGER.info("Initiate shutdown of Beam service {}.", this.applicationName);
+    if (this.pipelineResult == null) {
+      throw new IllegalStateException("Cannot stop service since it has never been started.");
+    }
+    LOGGER.info("Stopping Beam pipeline.");
+    try {
+      this.pipelineResult.cancel();
+      this.pipelineResult = null; // NOPMD use null to indicate absence
+    } catch (final IOException e) {
+      throw new IllegalStateException(
+          "Stopping the service failed due to failed stop of Beam pipeline.", e);
+    }
+    LOGGER.info("Shutdown of Beam service {} complete.", this.applicationName);
   }
 
 }
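
A minimal usage sketch of the revised lifecycle (hedged: construction of the `BeamService` is elided because its constructor is not part of this hunk; only the methods shown above are exercised):

```java
// Hypothetical benchmark entry point illustrating the new BeamService lifecycle.
public final class ExampleBeamApplication {

  public static void main(final String[] args) {
    final BeamService service = createService(args); // hypothetical helper

    // Variant 1: blocking execution; registers a shutdown hook that cancels the pipeline.
    service.runStandalone();

    // Variant 2 (alternative): non-blocking start with an explicit stop, e.g., from tests.
    // service.run();
    // ...
    // service.stop(); // cancels the pipeline; throws IllegalStateException if never started
  }

  private static BeamService createService(final String[] args) {
    // Construction depends on the concrete benchmark (pipeline factory and runner).
    throw new UnsupportedOperationException("Illustrative placeholder only.");
  }
}
```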
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..b092c97cf0e79895d4d6aafc594979b8f48dd167
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
@@ -0,0 +1,45 @@
+plugins {
+  // common java conventions
+  id 'theodolite.java-conventions'
+
+  // make executable
+  id 'application'
+}
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+    url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+  implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+  implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+  implementation 'com.google.guava:guava:24.1-jre'
+  implementation 'org.slf4j:slf4j-api:1.7.30'
+  implementation 'org.slf4j:slf4j-simple:1.7.30'
+
+
+  implementation 'io.confluent:kafka-avro-serializer:5.3.0'
+
+  implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
+  implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
+  implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
+
+  implementation project(':hazelcastjet-commons')
+
+  testImplementation("junit:junit:4.13.2")
+  testRuntimeOnly("org.junit.vintage:junit-vintage-engine:5.8.2")
+
+  testImplementation 'com.hazelcast:hazelcast:4.2:tests'
+  testImplementation 'com.hazelcast.jet:hazelcast-jet-core:4.5:tests'
+
+}
+
+test {
+  useJUnitPlatform()
+}
diff --git a/theodolite-benchmarks/definitions/install-configmaps.sh b/theodolite-benchmarks/definitions/install-configmaps.sh
index 03d87bf971e98693be7936368421c91a83fd16bb..e6630ca08154631a395c151fac376859fc885495 100755
--- a/theodolite-benchmarks/definitions/install-configmaps.sh
+++ b/theodolite-benchmarks/definitions/install-configmaps.sh
@@ -4,6 +4,12 @@ kubectl create configmap benchmark-resources-uc2-flink --from-file uc2-flink/res
 kubectl create configmap benchmark-resources-uc3-flink --from-file uc3-flink/resources
 kubectl create configmap benchmark-resources-uc4-flink --from-file uc4-flink/resources
 
+kubectl create configmap benchmark-resources-uc1-hazelcastjet --from-file uc1-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc2-hazelcastjet --from-file uc2-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc3-hazelcastjet --from-file uc3-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc4-hazelcastjet --from-file uc4-hazelcastjet/resources
+
+
 # Kafka Streams
 kubectl create configmap benchmark-resources-uc1-kstreams --from-file uc1-kstreams/resources
 kubectl create configmap benchmark-resources-uc2-kstreams --from-file uc2-kstreams/resources
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cab5a625c323628ec6d6a7152e53d3ff8393a8ba
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
@@ -0,0 +1,36 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc1-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ca8994c40350ed55e2a6b927c370b3d11a6d4bfa
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml
@@ -0,0 +1,42 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc1-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc1-hazelcastjet"
+          files:
+          - "uc1-hazelcastjet-deployment.yaml"
+          - "uc1-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc1-load-generator"
+          files:
+          - "uc1-load-generator-deployment.yaml"
+          - "uc1-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc1-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc1-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc1-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0f5000902ddf9c12f67643cc35ffc3c882970a72
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
@@ -0,0 +1,40 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc2-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: DOWNSAMPLE_INTERVAL
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..436bcc790c50c86e96b3b1853b198a0f6da1aec9
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
@@ -0,0 +1,47 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc2-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc2-hazelcastjet"
+          files:
+            - "uc2-hazelcastjet-deployment.yaml"
+            - "uc2-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc2-load-generator"
+          files:
+            - "uc2-load-generator-deployment.yaml"
+            - "uc2-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc2-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc2-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc2-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
index 620e9d89fb7aba54de9c3a7874dd804050c36191..f1c56b3a51ec884dca25a31ffafea195919a02e2 100644
--- a/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc3-flink/resources/jobmanager-deployment.yaml
@@ -46,7 +46,7 @@ spec:
             limits:
               memory: 4Gi
               cpu: 1000m
-          args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc3.flinks.uc3.flink.HistoryServiceFlinkJob"] # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
+          args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc3.flink.HistoryServiceFlinkJob"] # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           #command: ['sleep', '60m']
           ports:
             - containerPort: 6123
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..317d30328ec9ba3c0c30bf87733b3801e73d2477
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
@@ -0,0 +1,42 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc3-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE_IN_SECONDS
+              value: "50"
+            - name: HOPPING_SIZE_IN_SECONDS
+              value: "1"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3d9f755dc741458b0c8a27fe5ef450b09478b8cb
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml
@@ -0,0 +1,47 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc3-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc3-hazelcastjet"
+          files:
+            - "uc3-hazelcastjet-deployment.yaml"
+            - "uc3-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc3-load-generator"
+          files:
+            - "uc3-load-generator-deployment.yaml"
+            - "uc3-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc3-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc3-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc3-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
index d1ebf745537bd233ba6e702b2cc4cd919103e7b7..d7037e2c579d82485bb31c53f132d7938f424b38 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/jobmanager-deployment.yaml
@@ -46,7 +46,7 @@ spec:
             limits:
               memory: 4Gi
               cpu: 1000m
-          args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc4.flinks.uc4.flink.AggregationServiceFlinkJob"] # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
+          args: ["standalone-job", "--job-classname", "rocks.theodolite.benchmarks.uc4.flink.AggregationServiceFlinkJob"] # optional arguments: ["--job-id", "<job id>", "--fromSavepoint", "/path/to/savepoint", "--allowNonRestoredState"]
           #command: ['sleep', '60m']
           ports:
             - containerPort: 6123
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
index 7db560db53bac827dd92386e0de5621a2b911e35..cc1efa23c32220c7c664d8aaa4669f3af6492d15 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f0736e585400d26481272d5b3d75cd216d55527d
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
@@ -0,0 +1,40 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc4-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f0151969ccb2ff34558c4a56c78f17db9fd3678e
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml
@@ -0,0 +1,54 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc4-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc4-hazelcastjet"
+          files:
+            - "uc4-hazelcastjet-deployment.yaml"
+            - "uc4-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc4-load-generator"
+          files:
+            - "uc4-load-generator-deployment.yaml"
+            - "uc4-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc4-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumNestedGroups"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc4-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumNestedGroupsLoadGeneratorReplicaPatcher
+          resource: "uc4-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+            numSensors: "4.0"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "configuration"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "aggregation-feedback"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..fd91f14e241f952dc0d0ba3b4230db2f5deffa30
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml
@@ -0,0 +1,63 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc1-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3dbf43a74a9ad17784ca9e4e476dc70ed11c731f
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark start up and produce some output
+
+docker-compose logs --tail 100 benchmark |
+    sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
index c85ce305c2f1383a77d4c405d52089ec1d2b02a6..a673db5ced5b834632a1bca6f3fb4a2da2b68296 100755
--- a/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
@@ -38,7 +38,7 @@ services:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..92b90823e31f79e68b301ff039618c9520c92019
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
@@ -0,0 +1,64 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc2-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      DOWNSAMPLE_INTERVAL: 5000
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..be411755b74249d90756e445f7e67dc07bf5ebab
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d7c3fe7a017c24e0b212661f0b0b34c2a1fee32c
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
@@ -0,0 +1,65 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc3-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE_IN_SECONDS: 50
+      HOPPING_SIZE_IN_SECONDS: 5
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..217a730f73fa1fee3f875da34edd9047ed9221db
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9a254f5228677322ed98afcd73349cf7a50d80bc
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
@@ -0,0 +1,65 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc4-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE: 5000
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 4
+      NUM_NESTED_GROUPS: 3
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d9bb6ccf241935c39df63ea5e2f0fce02476e976
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b21\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/hazelcastjet-commons/build.gradle b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..934ebc5fae39eea90a1c0ab47f989ee3cc59d6f9
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
@@ -0,0 +1,23 @@
+plugins {
+    id 'theodolite.java-commons'
+}
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+      url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+    implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+    implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+    implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
+    implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
+    implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
+    implementation 'io.confluent:kafka-avro-serializer:5.3.0'
+    implementation 'org.slf4j:slf4j-api:1.7.25'
+}
\ No newline at end of file
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..46dd56a8669cce4d29e8dace1bd6c2649a71e1f0
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java
@@ -0,0 +1,71 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.JoinConfig;
+import org.slf4j.Logger;
+
+/**
+ * Build a Config Object for Benchmarks implemented in Hazelcast Jet.
+ *
+ */
+public class BenchmarkConfigBuilder {
+
+  /**
+   * Builds a Config Object for Benchmarks implemented in Hazelcast Jet using data from the
+   * environment.
+   */
+  public Config buildFromEnv(final Logger logger, final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+
+    final String bootstrapServer = System.getenv(ConfigurationKeys.BOOTSTRAP_SERVER);
+    final String kubernetesDnsName = System.getenv(ConfigurationKeys.KUBERNETES_DNS_NAME);
+
+    ClusterConfig clusterConfig;
+    if (bootstrapServer != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServer);
+      logger.info("Use bootstrap server '{}'.", bootstrapServer);
+    } else if (kubernetesDnsName != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromKubernetesDnsName(kubernetesDnsName);
+      logger.info("Use Kubernetes DNS name '{}'.", kubernetesDnsName);
+    } else {
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServerDefault);
+      logger.info(
+          "Neither a bootstrap server nor a Kubernetes DNS name was provided. Use default bootstrap server '{}'.", // NOCS
+          bootstrapServerDefault);
+    }
+
+    final String port = System.getenv(ConfigurationKeys.PORT);
+    if (port != null) {
+      clusterConfig.setPort(Integer.parseInt(port));
+    }
+
+    final String portAutoIncrement = System.getenv(ConfigurationKeys.PORT_AUTO_INCREMENT);
+    if (portAutoIncrement != null) {
+      clusterConfig.setPortAutoIncrement(Boolean.parseBoolean(portAutoIncrement));
+    }
+
+    final String clusterNamePrefix = System.getenv(ConfigurationKeys.CLUSTER_NAME_PREFIX);
+    if (clusterNamePrefix != null) {
+      clusterConfig.setClusterNamePrefix(clusterNamePrefix);
+    }
+
+    // Set network config for this hazelcast jet instance
+    final Config config = new Config()
+        .setClusterName(clusterConfig.getClusterNamePrefix());
+    final JoinConfig joinConfig = config.getNetworkConfig()
+        .setPort(clusterConfig.getPort())
+        .setPortAutoIncrement(clusterConfig.isPortAutoIncrement())
+        .getJoin();
+    joinConfig.getMulticastConfig().setEnabled(false);
+    if (clusterConfig.hasBootstrapServer()) {
+      joinConfig.getTcpIpConfig().addMember(clusterConfig.getBootstrapServer());
+    } else if (clusterConfig.hasKubernetesDnsName()) {
+      joinConfig.getKubernetesConfig()
+          .setEnabled(true)
+          .setProperty(hzKubernetesServiceDnsKey, clusterConfig.getKubernetesDnsName());
+    }
+
+    return config;
+  }
+
+}
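For reviewers, a minimal usage sketch of the new BenchmarkConfigBuilder (not part of this change): it assumes a caller that passes its own SLF4J logger, a default bootstrap address, and the Hazelcast Kubernetes plugin's service-dns property key; the class name and default values below are illustrative only.

import com.hazelcast.config.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rocks.theodolite.benchmarks.commons.hazelcastjet.BenchmarkConfigBuilder;

public final class ConfigBuilderExample {

  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigBuilderExample.class);

  public static void main(final String[] args) {
    // Resolution order in buildFromEnv: BOOTSTRAP_SERVER env var, then
    // KUBERNETES_DNS_NAME, then the given default bootstrap server.
    final Config config = new BenchmarkConfigBuilder()
        .buildFromEnv(LOGGER, "localhost:5701", "service-dns");
    LOGGER.info("Hazelcast cluster name: {}", config.getClusterName());
  }
}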
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java
new file mode 100644
index 0000000000000000000000000000000000000000..c5a2141799eb97dbedc5fc82fa456b2efee3a813
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java
@@ -0,0 +1,76 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+/**
+ * Configuration of a Hazelcast Jet cluster.
+ */
+public final class ClusterConfig {
+
+  private static final int PORT_DEFAULT = 5701;
+  private static final String CLUSTER_NAME_PREFIX_DEFAULT = "theodolite-hazelcastjet";
+
+  private final String bootstrapServer;
+  private final String kubernetesDnsName;
+  private int port = PORT_DEFAULT;
+  private boolean portAutoIncrement = true;
+  private String clusterNamePrefix = CLUSTER_NAME_PREFIX_DEFAULT;
+
+  /**
+   * Create a new {@link ClusterConfig} with the given parameter values.
+   */
+  private ClusterConfig(final String bootstrapServer, final String kubernetesDnsName) {
+    this.bootstrapServer = bootstrapServer;
+    this.kubernetesDnsName = kubernetesDnsName;
+  }
+
+  public boolean hasBootstrapServer() {
+    return this.bootstrapServer != null;
+  }
+
+  public String getBootstrapServer() {
+    return this.bootstrapServer;
+  }
+
+  public boolean hasKubernetesDnsName() {
+    return this.kubernetesDnsName != null;
+  }
+
+  public String getKubernetesDnsName() {
+    return this.kubernetesDnsName;
+  }
+
+  public int getPort() {
+    return this.port;
+  }
+
+  public boolean isPortAutoIncrement() {
+    return this.portAutoIncrement;
+  }
+
+  public ClusterConfig setPortAutoIncrement(final boolean portAutoIncrement) { // NOPMD
+    this.portAutoIncrement = portAutoIncrement;
+    return this;
+  }
+
+  public ClusterConfig setPort(final int port) { // NOPMD
+    this.port = port;
+    return this;
+  }
+
+  public String getClusterNamePrefix() {
+    return this.clusterNamePrefix;
+  }
+
+  public ClusterConfig setClusterNamePrefix(final String clusterNamePrefix) { // NOPMD
+    this.clusterNamePrefix = clusterNamePrefix;
+    return this;
+  }
+
+  public static ClusterConfig fromBootstrapServer(final String bootstrapServer) {
+    return new ClusterConfig(bootstrapServer, null);
+  }
+
+  public static ClusterConfig fromKubernetesDnsName(final String kubernetesDnsName) {
+    return new ClusterConfig(null, kubernetesDnsName);
+  }
+
+}
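A short sketch of the two factory methods and the fluent setters of ClusterConfig; all addresses and names below are arbitrary example values.

import rocks.theodolite.benchmarks.commons.hazelcastjet.ClusterConfig;

public final class ClusterConfigExample {

  public static void main(final String[] args) {
    // Cluster members join either via a fixed bootstrap address ...
    final ClusterConfig viaBootstrap = ClusterConfig.fromBootstrapServer("benchmark:5701")
        .setPort(5701)
        .setPortAutoIncrement(false);

    // ... or via Kubernetes DNS discovery.
    final ClusterConfig viaDns = ClusterConfig
        .fromKubernetesDnsName("titan-ccp-aggregation.default.svc.cluster.local")
        .setClusterNamePrefix("theodolite-hazelcastjet");

    System.out.println(viaBootstrap.hasBootstrapServer()); // true
    System.out.println(viaDns.hasKubernetesDnsName());     // true
  }
}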
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..d1705888430c92ee0cec50ea06871746bbe06cb5
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
@@ -0,0 +1,33 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+/**
+ * Configuration Keys used for Hazelcast Jet Benchmark implementations.
+ */
+public class ConfigurationKeys {
+
+  // Common Keys
+  public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
+  public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
+  public static final String PORT = "PORT";
+  public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
+  public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";
+  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
+  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
+  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";
+
+  // Additional topics
+  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";
+
+  // UC2
+  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";
+
+  // UC3
+  public static final String WINDOW_SIZE_IN_SECONDS = "WINDOW_SIZE_IN_SECONDS";
+  public static final String HOPPING_SIZE_IN_SECONDS = "HOPPING_SIZE_IN_SECONDS";
+
+  // UC4
+  public static final String KAFKA_CONFIGURATION_TOPIC = "KAFKA_CONFIGURATION_TOPIC";
+  public static final String KAFKA_FEEDBACK_TOPIC = "KAFKA_FEEDBACK_TOPIC";
+  public static final String WINDOW_SIZE_UC4 = "WINDOW_SIZE";
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..cc2ee052d5e2ed7e7b372baf7b59f24ef3e26e8f
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
@@ -0,0 +1,61 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import org.slf4j.Logger;
+
+/**
+ * Builds JetInstances for Benchmark Implementations in Hazelcast Jet.
+ */
+public class JetInstanceBuilder {
+
+  private Config config;
+
+  /**
+   * Sets the Hazelcast Config for the JetInstance to be built.
+   *
+   * @param hazelcastConfig Config for the JetInstance to be built.
+   * @return This {@link JetInstanceBuilder} with the given Config set.
+   */
+  public JetInstanceBuilder setCustomConfig(final Config hazelcastConfig) { // NOPMD
+    this.config = hazelcastConfig;
+    return this;
+  }
+
+  /**
+   * Sets the Config for this builder from environment variables, using a
+   * {@link BenchmarkConfigBuilder}.
+   *
+   * @param logger A logger used to log the applied configuration.
+   * @param bootstrapServerDefault Default bootstrap server used if the environment provides none.
+   * @param hzKubernetesServiceDnsKey Hazelcast Kubernetes property key for the service DNS name.
+   * @return This {@link JetInstanceBuilder} with a Config set.
+   */
+  public JetInstanceBuilder setConfigFromEnv(final Logger logger, // NOPMD
+      final String bootstrapServerDefault, final String hzKubernetesServiceDnsKey) {
+    // Use ClusterConfigBuilder to build a cluster config for this microservice
+    final BenchmarkConfigBuilder configBuilder = new BenchmarkConfigBuilder();
+    this.config =
+        configBuilder.buildFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey);
+    return this;
+  }
+
+  /**
+   * Builds and returns a JetInstance. If a config is set, the JetInstance will contain the set
+   * config.
+   *
+   * @return JetInstance
+   */
+  public JetInstance build() {
+    final JetInstance jet = Jet.newJetInstance();
+    if (this.config == null) {
+      return jet;
+    } else {
+      jet.getConfig().setHazelcastConfig(this.config);
+      return jet;
+    }
+
+  }
+
+}
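A minimal sketch, assuming the same illustrative defaults as above, of how a benchmark entry point could obtain a JetInstance from the builder:

import com.hazelcast.jet.JetInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;

public final class JetInstanceExample {

  private static final Logger LOGGER = LoggerFactory.getLogger(JetInstanceExample.class);

  public static void main(final String[] args) {
    // Builds the Hazelcast Config from BOOTSTRAP_SERVER / KUBERNETES_DNS_NAME
    // (falling back to the given default) and starts a Jet instance with it.
    final JetInstance jet = new JetInstanceBuilder()
        .setConfigFromEnv(LOGGER, "localhost:5701", "service-dns")
        .build();
    LOGGER.info("Started Jet instance '{}'.", jet.getName());
  }
}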
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..9bce60f57a6ecb9da4578e08d8f49bbb34af934a
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
@@ -0,0 +1,99 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
+import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.producer.ProducerConfig;
+
+
+/**
+ * Generalized builder for Kafka properties. Consumer properties always set
+ * AUTO_OFFSET_RESET_CONFIG to "earliest".
+ *
+ */
+public class KafkaPropertiesBuilder {
+
+  private static final String TRUE = "true";
+  private static final String AUTO_OFFSET_RESET_CONFIG = "earliest";
+
+
+  /**
+   * Builds Kafka consumer properties for reading a benchmark's Kafka input topic.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @param applicationName Used as the consumer group id for committing offsets.
+   * @param keyDeserializer Classname for the key deserializer.
+   * @param valueDeserializer Classname for the value deserializer.
+   * @return A Properties object containing the consumer configuration for a pipeline's source.
+   */
+  public Properties buildKafkaInputReadPropsFromEnv(final String kafkaBootstrapServerDefault,//NOPMD
+                                                    final String schemaRegistryUrlDefault,
+                                                    final String applicationName,
+                                                    final String keyDeserializer,
+                                                    final String valueDeserializer) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
+    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+        keyDeserializer);
+    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+        valueDeserializer);
+    props.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
+    props.setProperty(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, TRUE);
+    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, AUTO_OFFSET_RESET_CONFIG);
+
+    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, applicationName);
+    props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, TRUE);
+
+    return props;
+  }
+
+  /**
+   * Builds Kafka producer properties for writing a benchmark's results back to Kafka.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @param keySerializer Classname for the key serializer.
+   * @param valueSerializer Classname for the value serializer.
+   * @return A Properties object containing the producer configuration for a pipeline's Kafka
+   *         sink.
+   */
+  public Properties buildKafkaWritePropsFromEnv(final String kafkaBootstrapServerDefault,//NOPMD
+                                                final String schemaRegistryUrlDefault,
+                                                final String keySerializer,
+                                                final String valueSerializer) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
+    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+        keySerializer);
+    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+        valueSerializer);
+    props.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
+    props.setProperty("specific.avro.writer", TRUE);
+
+    return props;
+  }
+
+
+
+
+
+}
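A usage sketch for the two property builders; the deserializer/serializer class names and the application name are example values chosen for illustration, not prescribed by this change.

import java.util.Properties;
import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;

public final class KafkaPropsExample {

  public static void main(final String[] args) {
    final KafkaPropertiesBuilder builder = new KafkaPropertiesBuilder();

    // Consumer properties: the application name becomes the consumer group id,
    // offsets are reset to "earliest" and auto commit is enabled.
    final Properties readProps = builder.buildKafkaInputReadPropsFromEnv(
        "localhost:9092",
        "http://localhost:8081",
        "uc1-hazelcastjet-example",
        "org.apache.kafka.common.serialization.StringDeserializer",
        "io.confluent.kafka.serializers.KafkaAvroDeserializer");

    // Producer properties for writing results back to Kafka.
    final Properties writeProps = builder.buildKafkaWritePropsFromEnv(
        "localhost:9092",
        "http://localhost:8081",
        "org.apache.kafka.common.serialization.StringSerializer",
        "org.apache.kafka.common.serialization.StringSerializer");

    System.out.println(readProps.getProperty("group.id"));           // uc1-hazelcastjet-example
    System.out.println(writeProps.getProperty("bootstrap.servers")); // localhost:9092 (or env value)
  }
}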
diff --git a/theodolite-benchmarks/load-generator-commons/src/main/java/rocks/theodolite/benchmarks/loadgenerator/HttpRecordSender.java b/theodolite-benchmarks/load-generator-commons/src/main/java/rocks/theodolite/benchmarks/loadgenerator/HttpRecordSender.java
index 77706d824808132eaa7212194de0d69c346e4eba..f740c3696878516f29d0e06ba879cd23010a157b 100644
--- a/theodolite-benchmarks/load-generator-commons/src/main/java/rocks/theodolite/benchmarks/loadgenerator/HttpRecordSender.java
+++ b/theodolite-benchmarks/load-generator-commons/src/main/java/rocks/theodolite/benchmarks/loadgenerator/HttpRecordSender.java
@@ -84,8 +84,9 @@ public class HttpRecordSender<T extends SpecificRecord> implements RecordSender<
     final String json = this.gson.toJson(message);
     final HttpRequest request = HttpRequest.newBuilder()
         .uri(this.uri)
-        .timeout(this.connectionTimeout)
         .POST(HttpRequest.BodyPublishers.ofString(json))
+        .header("Content-Type", "application/json")
+        .timeout(this.connectionTimeout)
         .build();
     final BodyHandler<Void> bodyHandler = BodyHandlers.discarding();
     // final BodyHandler<String> bodyHandler = BodyHandlers.ofString();
diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle
index 776e7d8e4fe132839b6e27c70c368720415721ea..0040989a8b3b02487c2d7328726b7caadb90f32f 100644
--- a/theodolite-benchmarks/settings.gradle
+++ b/theodolite-benchmarks/settings.gradle
@@ -3,12 +3,14 @@ rootProject.name = 'theodolite-benchmarks'
 include 'load-generator-commons'
 include 'kstreams-commons'
 include 'flink-commons'
+include 'hazelcastjet-commons'
 include 'beam-commons'
 
 include 'uc1-load-generator'
 include 'uc1-commons'
 include 'uc1-kstreams'
 include 'uc1-flink'
+include 'uc1-hazelcastjet'
 include 'uc1-beam'
 include 'uc1-beam-flink'
 include 'uc1-beam-samza'
@@ -16,6 +18,7 @@ include 'uc1-beam-samza'
 include 'uc2-load-generator'
 include 'uc2-kstreams'
 include 'uc2-flink'
+include 'uc2-hazelcastjet'
 include 'uc2-beam'
 include 'uc2-beam-flink'
 include 'uc2-beam-samza'
@@ -23,6 +26,7 @@ include 'uc2-beam-samza'
 include 'uc3-load-generator'
 include 'uc3-kstreams'
 include 'uc3-flink'
+include 'uc3-hazelcastjet'
 include 'uc3-beam'
 include 'uc3-beam-flink'
 include 'uc3-beam-samza'
@@ -30,6 +34,7 @@ include 'uc3-beam-samza'
 include 'uc4-load-generator'
 include 'uc4-kstreams'
 include 'uc4-flink'
+include 'uc4-hazelcastjet'
 include 'uc4-beam'
 include 'uc4-beam-flink'
 include 'uc4-beam-samza'
diff --git a/theodolite-benchmarks/uc1-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc1/beam/flink/Uc1BeamFlink.java b/theodolite-benchmarks/uc1-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc1/beam/flink/Uc1BeamFlink.java
index e1317219fedf24bc4b0eb4a3f9668da7de196cca..7f39500433a77612fe5ab010372a24ca46035135 100644
--- a/theodolite-benchmarks/uc1-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc1/beam/flink/Uc1BeamFlink.java
+++ b/theodolite-benchmarks/uc1-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc1/beam/flink/Uc1BeamFlink.java
@@ -17,7 +17,7 @@ public final class Uc1BeamFlink {
   private Uc1BeamFlink() {}
 
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).runStandalone();
   }
 
 }
diff --git a/theodolite-benchmarks/uc1-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc1/beam/samza/Uc1BeamSamza.java b/theodolite-benchmarks/uc1-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc1/beam/samza/Uc1BeamSamza.java
index d3455db71bc3520bfa11c4da3a58c32da46337f9..9c3f650b7ddbe5e3c08139cdec2e590f5d55f3b3 100644
--- a/theodolite-benchmarks/uc1-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc1/beam/samza/Uc1BeamSamza.java
+++ b/theodolite-benchmarks/uc1-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc1/beam/samza/Uc1BeamSamza.java
@@ -21,6 +21,6 @@ public final class Uc1BeamSamza {
    * Main method.
    */
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).runStandalone();
   }
 }
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/PipelineFactory.java b/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/PipelineFactory.java
index 1f35d592ed9b2b1507eb5c30090d392d37ed7c1e..d95d9b3343835f8348af15c3d00c34ef807d4501 100644
--- a/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/PipelineFactory.java
@@ -18,7 +18,7 @@ import titan.ccp.model.records.ActivePowerRecord;
 public class PipelineFactory extends AbstractPipelineFactory {
 
   public static final String SINK_TYPE_KEY = "sink.type";
-  
+
   private final SinkType sinkType = SinkType.from(this.config.getString(SINK_TYPE_KEY));
 
   public PipelineFactory(final Configuration configuration) {
@@ -52,7 +52,8 @@ public class PipelineFactory extends AbstractPipelineFactory {
   protected void registerCoders(final CoderRegistry registry) {
     registry.registerCoderForClass(
         ActivePowerRecord.class,
-        AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        // AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        AvroCoder.of(ActivePowerRecord.class, false));
   }
 
   public static Function<Configuration, AbstractPipelineFactory> factory() {
diff --git a/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/WriterAdapter.java b/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/WriterAdapter.java
index 4519515cf7d74abb0c447c56df4bbe313133c6a7..c1dc2f7305d01b47de644e4f8d391955540f530c 100644
--- a/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/WriterAdapter.java
+++ b/theodolite-benchmarks/uc1-beam/src/main/java/rocks/theodolite/benchmarks/uc1/beam/WriterAdapter.java
@@ -6,7 +6,7 @@ import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
 
 /**
  * {@link DoFn} which wraps a {@link DatabaseAdapter} to be used with Beam.
- * 
+ *
  * @param <T> type the {@link DatabaseWriter} is associated with.
  */
 public class WriterAdapter<T> extends DoFn<T, Void> {
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7a0fcf7c5f61bed97c9a1e6d455164c64930c4fe
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc1-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc1-hazelcastjet/bin/uc1-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/build.gradle b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..cac5ad9f6f12b62389236decbe75fbec01050071
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+dependencies {
+    implementation project(':uc1-commons')
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc1.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..83848261318b2e90d19f28d9ab53fdc2cf678279
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
@@ -0,0 +1,64 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_TOPIC_DEFAULT = "input";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc1-hazelcastjet";
+
+
+  /**
+   * Entrypoint for UC1 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc1HistoryService = new HistoryService();
+    try {
+      uc1HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC1 service.
+   *
+   * @throws Exception If the Uc1HazelcastJetFactory is used incorrectly. Details are provided
+   *         with the thrown exception.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC1 using the Uc1HazelcastJetFactory.
+   *
+   * @throws Exception If the Uc1HazelcastJetFactory is used incorrectly. Details are provided
+   *         with the thrown exception.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc1HazelcastJetFactory()
+        .setPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+        .setKafkaInputTopicFromEnv(KAFKA_TOPIC_DEFAULT)
+        .buildUc1Pipeline()
+        .buildUc1JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc1Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..4a5c5dead14e606847dc5e2ac3c95414d9f611b3
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
@@ -0,0 +1,178 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC1
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Properties and set the input topic which can be done using internal functions of this
+ * factory. Outside data only refers to custom values or default values in case data of the
+ * environment cannot be fetched.
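+ *
+ * <p>
+ * A minimal usage sketch, mirroring the intended build order (the constants shown are the
+ * defaults of the UC1 {@code HistoryService}; any other values work as well):
+ * </p>
+ *
+ * <pre>{@code
+ * new Uc1HazelcastJetFactory()
+ *     .setPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+ *     .setKafkaInputTopicFromEnv(KAFKA_TOPIC_DEFAULT)
+ *     .buildUc1Pipeline()
+ *     .buildUc1JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+ *     .runUc1Job(JOB_NAME);
+ * }</pre>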
+ */
+public class Uc1HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaPropertiesForPipeline;
+  private String kafkaInputTopic;
+  private JetInstance uc1JetInstance;
+  private Pipeline uc1JetPipeline;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   */
+  public void runUc1Job(final String jobName) {
+
+    // Check if a Jet Instance for UC1 is set.
+    if (this.uc1JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC1.");
+    }
+
+    // Check if a Pipeline for UC1 is set.
+    if (this.uc1JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC1.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig();
+    jobConfig.setName(jobName);
+    this.uc1JetInstance.newJobIfAbsent(this.uc1JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc1HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc1HazelcastJetFactory buildUc1JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc1JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc1HazelcastJetFactory containing a set pipeline.
+   */
+  public Uc1HazelcastJetFactory buildUc1Pipeline() {
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaPropertiesForPipeline == null) {
+      throw new IllegalStateException(
+          "Kafka Properties for pipeline not set! Cannot build pipeline.");
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + "Cannot build pipeline.");
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc1PipelineBuilder pipeBuilder = new Uc1PipelineBuilder();
+    this.uc1JetPipeline =
+        pipeBuilder.build(this.kafkaPropertiesForPipeline, this.kafkaInputTopic);
+    // Return Uc1HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka properties for pipeline used in this builder.
+   *
+   * @param kafkaProperties A properties object containing necessary values used for the Hazelcast
+   *        Jet Kafka connection.
+   * @return The Uc1HazelcastJetFactory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setCustomProperties(final Properties kafkaProperties) { // NOPMD
+    this.kafkaPropertiesForPipeline = kafkaProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @param jobName The job name, which is passed on when building the Kafka properties.
+   * @return The Uc1HazelcastJetFactory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setPropertiesFromEnv(final String bootstrapServersDefault, // NOPMD
+                                                     final String schemaRegistryUrlDefault,
+                                                     final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaPropertiesForPipeline = kafkaProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc1HazelcastJetFactory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setCustomKafkaInputTopic(final String inputTopic) { // NOPMD
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc1HazelcastJetFactory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setKafkaInputTopicFromEnv(final String defaultInputTopic) { // NOPMD
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..c02ea1e7ea7fb3f27bdbf818248678011a93f6a2
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
@@ -0,0 +1,82 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import static com.hazelcast.jet.pipeline.SinkBuilder.sinkBuilder;
+
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sink;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import java.util.Map.Entry;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
+import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC1 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc1PipelineBuilder {
+
+  private final DatabaseAdapter<String> databaseAdapter = LogWriterFactory.forJson();
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
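+   * <p>
+   * A minimal usage sketch; the properties object and the topic name are illustrative and would
+   * typically be supplied by {@code Uc1HazelcastJetFactory}:
+   * </p>
+   *
+   * <pre>{@code
+   * final Pipeline pipeline = new Uc1PipelineBuilder().build(kafkaProperties, "input");
+   * }</pre>
+   *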
+   * @param kafkaPropsForPipeline Properties object containing the necessary Kafka attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @return A Hazelcast Jet pipeline which processes data for Uc1.
+   */
+  public Pipeline build(final Properties kafkaPropsForPipeline, final String kafkaInputTopic) {
+
+    // Define a new pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the Kafka Source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(kafkaPropsForPipeline, kafkaInputTopic);
+
+    // Extend UC1 topology to the pipeline
+    final StreamStage<String> uc1TopologyProduct = this.extendUc1Topology(pipe, kafkaSource);
+
+    // Add Sink: Logger
+    // Do not refactor this to call the database adapter directly inside the sink
+    // (there is a problem with static calls in functions in Hazelcast Jet)
+    final DatabaseWriter<String> writer = this.databaseAdapter.getDatabaseWriter();
+    final Sink<String> sink = sinkBuilder(
+        "Sink into database", x -> writer)
+        .<String>receiveFn(DatabaseWriter::write)
+        .build();
+
+    uc1TopologyProduct.writeTo(sink);
+
+    return pipe;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC1 topology defined by Theodolite.
+   *
+   * <p>
+   * UC1 takes {@code Entry<String,ActivePowerRecord>} objects and turns them into JSON strings
+   * using GSON.
+   * </p>
+   *
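+   * <p>
+   * A minimal usage sketch (the source and sink are assumed to be created elsewhere, e.g. a Kafka
+   * source and the database sink used in {@link #build(Properties, String)}):
+   * </p>
+   *
+   * <pre>{@code
+   * final Pipeline pipeline = Pipeline.create();
+   * final StreamStage<String> jsonRecords =
+   *     new Uc1PipelineBuilder().extendUc1Topology(pipeline, source);
+   * jsonRecords.writeTo(sink);
+   * }</pre>
+   *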
+   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
+   * @param source A streaming source to fetch data from.
+   * @return A {@code StreamStage<String>} containing the JSON strings described above. It can be
+   *         further modified or written directly to a sink.
+   */
+  public StreamStage<String> extendUc1Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source) {
+
+    // Build the pipeline topology
+    return pipe.readFrom(source)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .setName("Convert content")
+        .map(Entry::getValue)
+        .map(this.databaseAdapter.getRecordConverter()::convert);
+  }
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc1-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..525327ddbcdcddb6cf1bfe4e2d6be62d3384fc0c
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
@@ -0,0 +1,152 @@
+package rocks.theodolite.benchmarks.uc1.hazelcast;
+
+import static com.hazelcast.jet.pipeline.SinkBuilder.sinkBuilder;
+import static com.hazelcast.logging.Logger.getLogger;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sink;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import com.hazelcast.logging.ILogger;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
+import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
+import rocks.theodolite.benchmarks.uc1.hazelcastjet.Uc1PipelineBuilder;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC1.
+ */
+@Category(SerialTest.class)
+public class Uc1PipelineTest extends JetTestSupport {
+
+  private JetInstance testInstance = null;
+  private Pipeline testPipeline = null;
+  private StreamStage<String> uc1Topology = null;
+
+  // Standard logger
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(Uc1PipelineTest.class);
+  // Hazelcast Jet logger
+  private static final ILogger logger = getLogger(Uc1PipelineTest.class);
+
+  private final DatabaseAdapter<String> databaseAdapter = LogWriterFactory.forJson();
+
+  /**
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC1 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc1Pipeline() {
+
+    logger.info("Hazelcast Logger");
+    LOGGER.info("Standard Logger");
+
+
+    // Setup Configuration
+    final int testItemsPerSecond = 1;
+    final String testSensorName = "TEST_SENSOR";
+    final Double testValueInW = 10.0;
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+//    testJetConfig.setProperty( "hazelcast.logging.type", "slf4j" );
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = createJetMember(testJetConfig);
+
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    final Uc1PipelineBuilder pipelineBuilder = new Uc1PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc1Topology =
+        pipelineBuilder.extendUc1Topology(this.testPipeline, testSource);
+
+    // Create DatabaseWriter sink
+    final DatabaseWriter<String> adapter = this.databaseAdapter.getDatabaseWriter();
+    final Sink<String> sink = sinkBuilder(
+        "database-sink", x -> adapter)
+        .<String>receiveFn(DatabaseWriter::write)
+        .build();
+
+//    Map Stage, can be used instead of sink
+//    StreamStage<String> log = uc1Topology.map(s -> {
+//        LOGGER.info(s);
+//        return s;
+//    });
+//    log.writeTo(sink);
+
+    // Apply sink
+    this.uc1Topology.writeTo(sink);
+  }
+
+  /**
+   * UC1 Pipeline test to check if items are passed through at an acceptable rate.
+   */
+  @Test
+  public void test1Uc1PipelineElements() {
+
+    // Assertion Configuration
+    final int assertTimeoutSeconds = 6;
+    final int assertCollectedItems = 5;
+
+    LOGGER.info("Pipeline build successfully, starting test");
+
+    // Assertion
+    this.uc1Topology.apply(Assertions.assertCollectedEventually(assertTimeoutSeconds,
+        collection -> {
+          // Print the newest record
+          // LOGGER.info(collection.get(collection.size() - 1));
+
+          // Run pipeline until 5th item
+          Assert.assertTrue("Not enough data arrived in the end",
+              collection.size() >= assertCollectedItems);
+        }));
+
+    // Test the UC1 Pipeline Recreation
+    try {
+      this.testInstance.newJob(this.testPipeline).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc2/beam/flink/Uc2BeamFlink.java b/theodolite-benchmarks/uc2-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc2/beam/flink/Uc2BeamFlink.java
index ab6a9992a5dfca11a182235b467d5be76488ed55..2772d76fa26f504827ab74acb8fccc45f117365c 100644
--- a/theodolite-benchmarks/uc2-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc2/beam/flink/Uc2BeamFlink.java
+++ b/theodolite-benchmarks/uc2-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc2/beam/flink/Uc2BeamFlink.java
@@ -15,7 +15,7 @@ public final class Uc2BeamFlink {
   private Uc2BeamFlink() {}
 
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).runStandalone();
   }
 }
 
diff --git a/theodolite-benchmarks/uc2-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc2/beam/samza/Uc2BeamSamza.java b/theodolite-benchmarks/uc2-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc2/beam/samza/Uc2BeamSamza.java
index 80981818d401b48ed61ee56987764684df9dd31f..1b3f4ac8a2d052f0d34051e6b17b62100feb129d 100644
--- a/theodolite-benchmarks/uc2-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc2/beam/samza/Uc2BeamSamza.java
+++ b/theodolite-benchmarks/uc2-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc2/beam/samza/Uc2BeamSamza.java
@@ -19,7 +19,7 @@ public final class Uc2BeamSamza {
   private Uc2BeamSamza() {}
 
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).runStandalone();
   }
 
 }
diff --git a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
index 6de0b8f956c94af36cd70cf44ab691ff97e11ae9..375b2a6cba5256e0644b6beaf26d41e010089250 100644
--- a/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc2-beam/src/main/java/rocks/theodolite/benchmarks/uc2/beam/PipelineFactory.java
@@ -70,8 +70,10 @@ public class PipelineFactory extends AbstractPipelineFactory {
 
   @Override
   protected void registerCoders(final CoderRegistry registry) {
-    registry.registerCoderForClass(ActivePowerRecord.class,
-        AvroCoder.of(ActivePowerRecord.SCHEMA$));
+    registry.registerCoderForClass(
+        ActivePowerRecord.class,
+        // AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        AvroCoder.of(ActivePowerRecord.class, false));
     registry.registerCoderForClass(StatsAggregation.class,
         SerializableCoder.of(StatsAggregation.class));
     registry.registerCoderForClass(StatsAccumulator.class,
diff --git a/theodolite-benchmarks/uc2-flink/build.gradle b/theodolite-benchmarks/uc2-flink/build.gradle
index 0c50937f3378a8644a1551bcab1f79c173257794..ea4d2d8b285792b8ce66484d0d9c0907a7c49957 100644
--- a/theodolite-benchmarks/uc2-flink/build.gradle
+++ b/theodolite-benchmarks/uc2-flink/build.gradle
@@ -2,4 +2,4 @@ plugins {
   id 'theodolite.flink'
 }
 
-mainClassName = "rocks.theodolite.benchmarks.uc1.flink.HistoryServiceFlinkJob"
+mainClassName = "rocks.theodolite.benchmarks.uc2.flink.HistoryServiceFlinkJob"
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..4a7680e29042025d48c4c37a8f424871fe48bbf8
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc2-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc2-hazelcastjet/bin/uc2-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/build.gradle b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..ef1597413570a5d7b3af8538ced8d4a98d4fa6f8
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc2.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..f382978b714fdfdff6c190339c2ed23a2e037069
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
@@ -0,0 +1,70 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+
+  // UC2 specific (default)
+  private static final String DOWNSAMPLE_INTERVAL_DEFAULT_MS = "60000";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc2-hazelcastjet";
+
+  /**
+   * Entrypoint for UC2 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc2HistoryService = new HistoryService();
+    try {
+      uc2HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC2 service.
+   *
+   * @throws Exception If the Uc2HazelcastJetFactory is used incorrectly. Details are provided
+   *         with the thrown exception.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC2 using the Uc2HazelcastJetFactory.
+   *
+   * @throws Exception If the Uc2HazelcastJetFactory is used incorrectly. Details are provided
+   *         with the thrown exception.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc2HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setDownsampleIntervalFromEnv(DOWNSAMPLE_INTERVAL_DEFAULT_MS)
+        .buildUc2Pipeline()
+        .buildUc2JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc2Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..143b154f3726e75d2842766b49bd2e26f57ce39b
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
@@ -0,0 +1,301 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC2
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the downsample interval
+ * which can be done using internal functions of this factory. Outside data only refers to custom
+ * values or default values in case data of the environment cannot be fetched.
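+ *
+ * <p>
+ * A minimal usage sketch, mirroring the intended build order (the constants shown are the
+ * defaults of the UC2 {@code HistoryService}; any other values work as well):
+ * </p>
+ *
+ * <pre>{@code
+ * new Uc2HazelcastJetFactory()
+ *     .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+ *     .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+ *     .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+ *     .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+ *     .setDownsampleIntervalFromEnv(DOWNSAMPLE_INTERVAL_DEFAULT_MS)
+ *     .buildUc2Pipeline()
+ *     .buildUc2JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+ *     .runUc2Job(JOB_NAME);
+ * }</pre>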
+ */
+public class Uc2HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc2JetInstance;
+  private Pipeline uc2JetPipeline;
+  // UC2 specific
+  private int downsampleInterval;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   */
+  public void runUc2Job(final String jobName) {
+
+    // Check if a Jet Instance for UC2 is set.
+    if (this.uc2JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC2.");
+    }
+
+    // Check if a Pipeline for UC2 is set.
+    if (this.uc2JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC2.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig();
+    jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
+    jobConfig.setName(jobName);
+    this.uc2JetInstance.newJobIfAbsent(this.uc2JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc2HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc2HazelcastJetFactory buildUc2JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc2JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc2HazelcastJetFactory containing a set pipeline.
+   * @throws IllegalStateException If the input topic or the Kafka properties are not defined, the
+   *         pipeline cannot be built.
+   */
+  public Uc2HazelcastJetFactory buildUc2Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the downsampleInterval (tumbling window time) is set.
+    if (this.downsampleInterval <= 0) {
+      throw new IllegalStateException(
+          "downsample interval for pipeline not set or not bigger than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc2PipelineBuilder pipeBuilder = new Uc2PipelineBuilder();
+    this.uc2JetPipeline =
+        pipeBuilder.build(this.kafkaReadPropsForPipeline, this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic, this.downsampleInterval);
+    // Return Uc2HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to read data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to write data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @param jobName The name of the job, passed on to the Kafka properties builder.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc2HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaReadPropsForPipeline = kafkaReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc2HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder.
+   *
+   * @param downsampleInterval the downsample interval to be used for this pipeline.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setCustomDownsampleInterval(// NOPMD
+      final int downsampleInterval) {
+    this.downsampleInterval = downsampleInterval;
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder from the environment.
+   *
+   * @param defaultDownsampleInterval the default downsample interval to be used for this pipeline
+   *        when none is set in the environment.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setDownsampleIntervalFromEnv(// NOPMD
+      final String defaultDownsampleInterval) {
+    final String downsampleInterval = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.DOWNSAMPLE_INTERVAL),
+        defaultDownsampleInterval);
+    final int downsampleIntervalNumber = Integer.parseInt(downsampleInterval);
+    this.downsampleInterval = downsampleIntervalNumber;
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..73377de6122d4a723c5dbbcb8198fa814c4bed1e
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
@@ -0,0 +1,135 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSupplier;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC2 which can be used for stream processing using
+ * Hazelcast Jet.
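+ *
+ * <p>
+ * A usage sketch (the topic names and the 5000 ms window are example values only; the Kafka
+ * properties are assumed to be prepared by the caller):
+ * </p>
+ *
+ * <pre>{@code
+ * final Pipeline pipeline = new Uc2PipelineBuilder()
+ *     .build(kafkaReadProps, kafkaWriteProps, "input", "output", 5000);
+ * }</pre>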
+ */
+public class Uc2PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param downsampleIntervalInMs The window length of the tumbling window used in the aggregation
+   *        of this pipeline.
+   * @return A Pipeline that can be used in a Hazelcast Jet Instance to process data for UC2.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int downsampleIntervalInMs) {
+
+    // Define a new pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the Kafka Source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(kafkaReadPropsForPipeline, kafkaInputTopic);
+
+    // Extend UC2 topology to the pipeline
+    final StreamStage<Map.Entry<String, String>> uc2TopologyProduct =
+        this.extendUc2Topology(pipe, kafkaSource, downsampleIntervalInMs);
+
+    // Add Sink1: Logger
+    uc2TopologyProduct.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc2TopologyProduct.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC2 topology defined by Theodolite.
+   *
+   * <p>
+   * UC2 takes {@code ActivePowerRecord} objects, groups them by keys, windows them in a tumbling
+   * window and aggregates them into {@code Stats} objects. The final map returns an
+   * {@code Entry<String,String>} where the key is the key of the group and the value is the
+   * {@code .toString()} representation of the {@code Stats} object.
+   * </p>
+   *
+   * @param pipe The blank Hazelcast Jet pipeline to extend with the UC2 logic.
+   * @param source A streaming source to fetch data from.
+   * @param downsampleIntervalInMs The size of the tumbling window.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc2Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source,
+      final int downsampleIntervalInMs) {
+    // Build the pipeline topology.
+    return pipe.readFrom(source)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .groupingKey(record -> record.getValue().getIdentifier())
+        .window(WindowDefinition.tumbling(downsampleIntervalInMs))
+        .aggregate(this.uc2AggregateOperation())
+        .map(agg -> {
+          final String theKey = agg.key();
+          final String theValue = agg.getValue().toString();
+          return Map.entry(theKey, theValue);
+        });
+  }
+
+  /**
+   * Defines an AggregateOperation1 for Hazelcast Jet which is used in the Pipeline of the Hazelcast
+   * Jet implementation of UC2.
+   *
+   * <p>
+   * Takes windowed and keyed {@code Entry<String,ActivePowerRecord>} elements and returns a
+   * {@code Stats} object.
+   * </p>
+   *
+   * @return An AggregateOperation used by Hazelcast Jet in a streaming stage which aggregates
+   *         ActivePowerRecord Objects into Stats Objects.
+   */
+  public AggregateOperation1<Entry<String, ActivePowerRecord>,
+      StatsAccumulator, Stats> uc2AggregateOperation() {
+    // Aggregate operation to create a Stats object from Entry<String,ActivePowerRecord> items
+    // using the StatsAccumulator.
+    return AggregateOperation
+        // Creates the accumulator
+        .withCreate(new StatsAccumulatorSupplier())
+        // Defines the accumulation
+        .<Entry<String, ActivePowerRecord>>andAccumulate((accumulator, item) -> {
+          accumulator.add(item.getValue().getValueInW());
+        })
+        // Defines the combination of spread out instances
+        .andCombine((left, right) -> {
+          final Stats rightStats = right.snapshot();
+          left.addAll(rightStats);
+
+        })
+        // Finishes the aggregation
+        .andExportFinish(accumulator -> accumulator.snapshot());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..5c22b8dd6cc1a7af995a98b4388f40a1a3867ba5
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
@@ -0,0 +1,38 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A serializer and deserializer for the StatsAccumulator which is used in the UC2 implementation
+ * using Hazelcast Jet.
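+ *
+ * <p>
+ * The serializer has to be registered on the job configuration, as done in
+ * {@code Uc2HazelcastJetFactory#runUc2Job}:
+ * </p>
+ *
+ * <pre>{@code
+ * final JobConfig jobConfig = new JobConfig();
+ * jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
+ * }</pre>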
+ */
+public class StatsAccumulatorSerializer implements StreamSerializer<StatsAccumulator> {
+
+  private static final int TYPE_ID = 69_420;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final StatsAccumulator object) throws IOException {
+    final byte[] byteArray = object.snapshot().toByteArray();
+    out.writeByteArray(byteArray);
+  }
+
+  @Override
+  public StatsAccumulator read(final ObjectDataInput in) throws IOException {
+    final byte[] byteArray = in.readByteArray();
+    final Stats deserializedStats = Stats.fromByteArray(byteArray);
+    final StatsAccumulator accumulator = new StatsAccumulator();
+    accumulator.addAll(deserializedStats);
+    return accumulator;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
new file mode 100644
index 0000000000000000000000000000000000000000..f4d203f03185cda712a5280634d8d3858c02f30d
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
@@ -0,0 +1,22 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.function.SupplierEx;
+
+/**
+ * Supplies a StatsAccumulator. Is used in the aggregation operation of the Hazelcast Jet
+ * implementation for UC2.
+ */
+public class StatsAccumulatorSupplier implements SupplierEx<StatsAccumulator> {
+
+  private static final long serialVersionUID = -656395626316842910L; // NOPMD
+
+  /**
+   * Gets a StatsAccumulator.
+   */
+  @Override
+  public StatsAccumulator getEx() throws Exception {
+    return new StatsAccumulator();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc2-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..ff72b9558f43334feb8846d50bef2c6714d9404a
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
@@ -0,0 +1,108 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC2.
+ */
+@Category(SerialTest.class)
+public class Uc2PipelineTest extends JetTestSupport {
+
+  JetInstance testInstance = null;
+  Pipeline testPipeline = null;
+  StreamStage<Entry<String, String>> uc2Topology = null;
+
+  /*
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC2 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc2Pipeline() {
+
+    // Setup Configuration
+    int testItemsPerSecond = 1;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    int testWindowInMs = 5000;
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    Uc2PipelineBuilder pipelineBuilder = new Uc2PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc2Topology =
+        pipelineBuilder.extendUc2Topology(this.testPipeline, testSource, testWindowInMs);
+
+  }
+
+  /**
+   * Tests whether the aggregation of the first window produces the expected Stats output.
+   */
+  @Test
+  public void testOutput() {
+
+    // Assertion Configuration
+    int timeout = 14;
+    String expectedOutput = "Stats{count=5, mean=10.0, populationStandardDeviation=0.0, min=10.0, max=10.0}";
+
+    // Assertion
+    this.uc2Topology.apply(Assertions.assertCollectedEventually(timeout,
+        collection -> Assert.assertTrue(
+            "Not the right amount items in Stats Object!",
+            collection.get(collection.size()-1).getValue().equals(expectedOutput))));
+
+    // Run the test!
+    try {
+      this.testInstance.newJob(this.testPipeline).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally!");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc3/beam/flink/Uc3BeamFlink.java b/theodolite-benchmarks/uc3-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc3/beam/flink/Uc3BeamFlink.java
index 8782559fea6a08ad2c5a92b355149e3a2ee02ea2..f4f4563925ede4d61edcaab29c3d6e7aed0b5e9c 100644
--- a/theodolite-benchmarks/uc3-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc3/beam/flink/Uc3BeamFlink.java
+++ b/theodolite-benchmarks/uc3-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc3/beam/flink/Uc3BeamFlink.java
@@ -21,7 +21,7 @@ public final class Uc3BeamFlink {
    * Start running this microservice.
    */
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).runStandalone();
   }
 }
 
diff --git a/theodolite-benchmarks/uc3-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc3/beam/samza/Uc3BeamSamza.java b/theodolite-benchmarks/uc3-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc3/beam/samza/Uc3BeamSamza.java
index 84e705f6f52f41f5c553a1ef3fb2ebd7ce95e20a..247cd99becff8a200185c8fa40efb49bf31a6806 100644
--- a/theodolite-benchmarks/uc3-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc3/beam/samza/Uc3BeamSamza.java
+++ b/theodolite-benchmarks/uc3-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc3/beam/samza/Uc3BeamSamza.java
@@ -21,7 +21,7 @@ public final class Uc3BeamSamza {
    * Start running this microservice.
    */
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).runStandalone();
   }
 
 }
diff --git a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
index de960d3d8466f9f420f002667df04d8a2fc64873..9c766e41254555647dd7ef1eed0417613b7c1629 100644
--- a/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc3-beam/src/main/java/rocks/theodolite/benchmarks/uc3/beam/PipelineFactory.java
@@ -91,7 +91,8 @@ public class PipelineFactory extends AbstractPipelineFactory {
   protected void registerCoders(final CoderRegistry registry) {
     registry.registerCoderForClass(
         ActivePowerRecord.class,
-        AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        // AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        AvroCoder.of(ActivePowerRecord.class, false));
     registry.registerCoderForClass(
         HourOfDayKey.class,
         new HourOfDayKeyCoder());
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..6dc99aeb7263ef0084e9721ad9bd908a36cd61f6
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc3-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc3-hazelcastjet/bin/uc3-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/build.gradle b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..737ef6af17cb4c5aa4aa2ee97b5c3e7e4aa0d929
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc3.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..ecf38bd6c6a85e6d0f1431708a69f3431aff4730
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
@@ -0,0 +1,72 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  
+  // UC3 specific (default)
+  private static final String WINDOW_SIZE_IN_SECONDS_DEFAULT = "2629800"; // ~ one month
+  private static final String HOPSIZE_IN_SEC_DEFAULT = "86400"; // one day
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc3-hazelcastjet";
+
+  /**
+   * Entrypoint for UC3 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc3HistoryService = new HistoryService();
+    try {
+      uc3HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC3 service.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC3 using the Uc3HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc3HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setWindowSizeInSecondsFromEnv(WINDOW_SIZE_IN_SECONDS_DEFAULT)
+        .setHoppingSizeInSecondsFromEnv(HOPSIZE_IN_SEC_DEFAULT)
+        .buildUc3Pipeline()
+        .buildUc3JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc3Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..be6d70d27b9a868914ec5d28e84b4a90454ab56c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
@@ -0,0 +1,341 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC3
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the window size in seconds
+ * and the hopping size in seconds. This can be done using internal functions of this factory.
+ * Outside data is limited to custom values or default values that are used in case data from the
+ * environment cannot be fetched.
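+ *
+ * <p>
+ * A minimal usage sketch, mirroring {@code HistoryService#createHazelcastJetApplication} (the
+ * default values and the {@code LOGGER} below are only illustrative):
+ * </p>
+ *
+ * <pre>{@code
+ * new Uc3HazelcastJetFactory()
+ *     .setReadPropertiesFromEnv("localhost:9092", "http://localhost:8081", "uc3-hazelcastjet")
+ *     .setWritePropertiesFromEnv("localhost:9092", "http://localhost:8081")
+ *     .setKafkaInputTopicFromEnv("input")
+ *     .setKafkaOutputTopicFromEnv("output")
+ *     .setWindowSizeInSecondsFromEnv("2629800")
+ *     .setHoppingSizeInSecondsFromEnv("86400")
+ *     .buildUc3Pipeline()
+ *     .buildUc3JetInstanceFromEnv(LOGGER, "localhost:5701", "service-dns")
+ *     .runUc3Job("uc3-hazelcastjet");
+ * }</pre>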
+ */
+public class Uc3HazelcastJetFactory { // NOPMD
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc3JetInstance;
+  private Pipeline uc3JetPipeline;
+  // UC3 specific
+  private int windowSizeInSeconds;
+  private int hoppingSizeInSeconds;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws IllegalStateException If either no JetInstance or no Pipeline is set, a job cannot be
+   *         started.
+   */
+  public void runUc3Job(final String jobName) throws IllegalStateException { // NOPMD
+
+    // Check if a Jet Instance for UC3 is set.
+    if (this.uc3JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC3.");
+    }
+
+    // Check if a Pipeline for UC3 is set.
+    if (this.uc3JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC3.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig()
+        .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class)
+        .setName(jobName);
+    this.uc3JetInstance.newJobIfAbsent(this.uc3JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc3HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc3HazelcastJetFactory buildUc3JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc3JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline that a JetInstance can run as a job. Needs the Kafka
+   * properties, the input and output topics, the window size, and the hopping size defined in this
+   * factory beforehand.
+   *
+   * @return A Uc3HazelcastJetFactory containing a set pipeline.
+   * @throws IllegalStateException If the Kafka properties, the topics, the window size, or the
+   *         hopping size are not defined, the pipeline cannot be built.
+   */
+  public Uc3HazelcastJetFactory buildUc3Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the window size for the "sliding" window is set.
+    if (this.windowSizeInSeconds <= 0) {
+      throw new IllegalStateException(
+          "window size in seconds for pipeline not set or not greater than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Check if the hopping distance for the "sliding" window is set.
+    if (this.hoppingSizeInSeconds <= 0) {
+      throw new IllegalStateException(
+          "hopping size in seconds for pipeline not set or not greater than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc3PipelineBuilder pipeBuilder = new Uc3PipelineBuilder();
+    this.uc3JetPipeline =
+        pipeBuilder.build(this.kafkaReadPropsForPipeline,
+            this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic, this.hoppingSizeInSeconds,
+            this.windowSizeInSeconds);
+    // Return Uc3HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to read data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to write data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @param jobName The name of the job, passed on to the Kafka properties builder.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaReadPropsForPipeline = kafkaReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder.
+   *
+   * @param windowSizeInSeconds the windowSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomWindowSizeInSeconds(// NOPMD
+      final int windowSizeInSeconds) {
+    this.windowSizeInSeconds = windowSizeInSeconds;
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSizeInSeconds the default window size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setWindowSizeInSecondsFromEnv(// NOPMD
+      final String defaultWindowSizeInSeconds) {
+    final String windowSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_IN_SECONDS),
+        defaultWindowSizeInSeconds);
+    final int windowSizeInSecondsNumber = Integer.parseInt(windowSizeInSeconds);
+    this.windowSizeInSeconds = windowSizeInSecondsNumber;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder.
+   *
+   * @param hoppingSizeInSeconds the hoppingSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomHoppingSizeInSeconds(// NOPMD
+      final int hoppingSizeInSeconds) {
+    this.hoppingSizeInSeconds = hoppingSizeInSeconds;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultHoppingSizeInSeconds the default hopping size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setHoppingSizeInSecondsFromEnv(// NOPMD
+      final String defaultHoppingSizeInSeconds) {
+    final String hoppingSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.HOPPING_SIZE_IN_SECONDS),
+        defaultHoppingSizeInSeconds);
+    final int hoppingSizeInSecondsNumber = Integer.parseInt(hoppingSizeInSeconds);
+    this.hoppingSizeInSeconds = hoppingSizeInSecondsNumber;
+    return this;
+  }
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8427de60742c2923d4ec17703592f5b8310de0c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
@@ -0,0 +1,125 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HoursOfDayKeyFactory;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.StatsKeyFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a Hazelcast Jet {@link Pipeline} for the UC3 benchmark that can be run on a
+ * Hazelcast Jet instance for stream processing.
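+ *
+ * <p>
+ * A minimal usage sketch (the Kafka {@link java.util.Properties} objects are assumed to be built
+ * elsewhere, e.g. by a Kafka properties builder; topic names, hop size, and window size are
+ * illustrative):
+ * </p>
+ *
+ * <pre>{@code
+ * final Pipeline pipeline = new Uc3PipelineBuilder()
+ *     .build(kafkaReadProps, kafkaWriteProps, "input", "output", 1, 50);
+ * }</pre>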
+ */
+public class Uc3PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka read
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param hoppingSizeInSeconds The hop length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @param windowSizeInSeconds The window length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @return A Pipeline which can be used in a Hazelcast Jet Instance to process data for UC3.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int hoppingSizeInSeconds, final int windowSizeInSeconds) {
+
+    // Define a new Pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource = KafkaSources
+        .<String, ActivePowerRecord>kafka(
+            kafkaReadPropsForPipeline, kafkaInputTopic);
+
+    // Extend topology for UC3
+    final StreamStage<Map.Entry<String, String>> uc3Product =
+        this.extendUc3Topology(pipe, kafkaSource, hoppingSizeInSeconds, windowSizeInSeconds);
+
+    // Add Sink1: Logger
+    uc3Product.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc3Product.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC3 topology defined by Theodolite.
+   *
+   * <p>
+   * UC3 takes {@code ActivePowerRecord} objects, assigns each record to the hour of the day it
+   * belongs to, groups the records by sensor ID and hour of day, and calculates the average value
+   * within a sliding window.
+   * </p>
+   *
+   * @param pipe The blank Hazelcast Jet pipeline to which the UC3 logic is added.
+   * @param source A streaming source to fetch data from.
+   * @param hoppingSizeInSeconds The jump distance of the "sliding" window.
+   * @param windowSizeInSeconds The size of the "sliding" window.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc3Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source, final int hoppingSizeInSeconds,
+      final int windowSizeInSeconds) {
+    // Build the pipeline topology.
+    return pipe
+        .readFrom(source)
+        // use Timestamps
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        // Map timestamp to hour of day and create new key using sensorID and
+        // datetime mapped to HourOfDay
+        .map(record -> {
+          final String sensorId = record.getValue().getIdentifier();
+          final long timestamp = record.getValue().getTimestamp();
+          final LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+              TimeZone.getDefault().toZoneId());
+
+          final StatsKeyFactory<HourOfDayKey> keyFactory = new HoursOfDayKeyFactory();
+          final HourOfDayKey newKey = keyFactory.createKey(sensorId, dateTime);
+
+          return Map.entry(newKey, record.getValue());
+        })
+        // group by new keys
+        .groupingKey(Entry::getKey)
+        // Sliding/Hopping Window
+        .window(WindowDefinition.sliding(TimeUnit.SECONDS.toMillis(windowSizeInSeconds),
+            TimeUnit.SECONDS.toMillis(hoppingSizeInSeconds)))
+        // get average value of group (sensorId, hourOfDay)
+        .aggregate(
+            AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+        // map to return pair (sensorId, hourOfDay) -> (averaged watt value)
+        .map(agg -> {
+          final String theValue = agg.getValue().toString();
+          final String theKey = agg.getKey().toString();
+          return Map.entry(theKey, theValue);
+        });
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..c69f433f3af7ec0484c254af9e59e7d284379cb0
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.util.Objects;
+
+/**
+ * A key consisting of an hour of the day and a sensor ID.
+ *
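+ * <p>
+ * {@link #toString()} renders the key as {@code sensorId;hourOfDay}; the UC3 pipeline uses this
+ * string representation as the key of its output records.
+ * </p>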
+ */
+public class HourOfDayKey {
+
+  private final int hourOfDay;
+  private final String sensorId;
+
+  public HourOfDayKey(final int hourOfDay, final String sensorId) {
+    this.hourOfDay = hourOfDay;
+    this.sensorId = sensorId;
+  }
+
+  public int getHourOfDay() {
+    return this.hourOfDay;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  @Override
+  public String toString() {
+    return this.sensorId + ";" + this.hourOfDay;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.hourOfDay, this.sensorId);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof HourOfDayKey) {
+      final HourOfDayKey other = (HourOfDayKey) obj;
+      return this.hourOfDay == other.hourOfDay
+          && Objects.equals(this.sensorId, other.sensorId);
+    }
+    return false;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..91ba3f2be26f4317a1dec81caf9080da8c1edc9c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
@@ -0,0 +1,32 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A Hazelcast stream serializer for the {@link HourOfDayKey} to allow for parallelization.
+ *
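+ * <p>
+ * The serializer has to be registered for the Hazelcast Jet job that uses the key, for example (as
+ * done in the UC3 pipeline test):
+ * </p>
+ *
+ * <pre>{@code
+ * final JobConfig jobConfig = new JobConfig()
+ *     .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class);
+ * }</pre>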
+ */
+public class HourOfDayKeySerializer implements StreamSerializer<HourOfDayKey> {
+
+  private static final int TYPE_ID = 1;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final HourOfDayKey key) throws IOException {
+    out.writeInt(key.getHourOfDay());
+    out.writeString(key.getSensorId());
+  }
+
+  @Override
+  public HourOfDayKey read(final ObjectDataInput in) throws IOException {
+    return new HourOfDayKey(in.readInt(), in.readString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..4eddb85efebf5b8b07317d0cd39f36b90d3f4fcd
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
@@ -0,0 +1,22 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/**
+ * A factory class to build an {@link HourOfDayKey}.
+ *
+ */
+public class HoursOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey> {
+
+  @Override
+  public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) {
+    final int hourOfDay = dateTime.getHour();
+    return new HourOfDayKey(hourOfDay, sensorId);
+  }
+
+  @Override
+  public String getSensorId(final HourOfDayKey key) {
+    return key.getSensorId();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..2a404781e5916473604f14f87b9c3eccf9eda342
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
@@ -0,0 +1,17 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/**
+ * Factory interface for creating a stats key from a sensor id and a {@link LocalDateTime} object
+ * and vice versa.
+ *
+ * @param <T> Type of the key
+ */
+public interface StatsKeyFactory<T> {
+
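+  /**
+   * Creates a key from the given sensor ID and the given {@link LocalDateTime}.
+   */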
+  T createKey(String sensorId, LocalDateTime dateTime);
+
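+  /**
+   * Extracts the sensor ID from the given key.
+   */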
+  String getSensorId(T key);
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc3-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..3df4f4642f1bc6c8637f90bcae3f352f5c298e51
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
@@ -0,0 +1,162 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TimeZone;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC3.
+ */
+@Category(SerialTest.class)
+public class Uc3PipelineTest extends JetTestSupport {
+
+  // Test Machinery
+  private JetInstance testInstance = null;
+  private Pipeline testPipeline = null;
+  private StreamStage<Entry<String, String>> uc3Topology = null;
+
+
+  /**
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC3 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc3Pipeline() {
+
+    // Setup Configuration
+    int testItemsPerSecond = 1;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    int testHopSizeInSec = 1;
+    int testWindowSizeInSec = 50;
+    // Used to check hourOfDay
+    long mockTimestamp = 1632741651;
+
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, mockTimestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    Uc3PipelineBuilder pipelineBuilder = new Uc3PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc3Topology = pipelineBuilder.extendUc3Topology(testPipeline, testSource,
+        testHopSizeInSec, testWindowSizeInSec);
+  }
+
+  /**
+   * Tests that the collected output items have the expected key and value structure.
+   */
+  @Test
+  public void testOutput() {
+
+    // Assertion Configuration
+    int timeout = 10;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    // Used to check hourOfDay
+    long mockTimestamp = 1632741651;
+
+    // Assertion
+    this.uc3Topology.apply(Assertions.assertCollectedEventually(timeout,
+        collection -> {
+
+          // DEBUG
+          System.out.println("DEBUG: CHECK 1 || Entered Assertion of testOutput()");
+
+          // Check all collected Items
+          boolean allOkay = true;
+          if (collection != null) {
+            System.out.println("DEBUG: CHECK 2 || Collection Size: " + collection.size());
+            for (int i = 0; i < collection.size(); i++) {
+
+              // Build hour of day
+              long timestamp = mockTimestamp;
+              int expectedHour = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+                  TimeZone.getDefault().toZoneId()).getHour();
+
+              // Compare expected output with generated output
+              Entry<String, String> currentEntry = collection.get(i);
+              String expectedKey = testSensorName + ";" + expectedHour;
+              String expectedValue = testValueInW.toString();
+
+              // DEBUG
+              System.out.println(
+                  "DEBUG: CHECK 3 || Expected Output: '" + expectedKey + "=" + expectedValue
+                      + "' - Actual Output: '" + currentEntry.getKey() + "="
+                      + currentEntry.getValue().toString() + "'");
+
+              if (!(currentEntry.getKey().equals(expectedKey)
+                  && currentEntry.getValue().toString().equals(expectedValue))) {
+                System.out.println("DEBUG: CHECK 5 || Failed assertion!");
+                allOkay = false;
+              }
+            }
+          }
+
+          // Assertion
+          Assert.assertTrue(
+              "Items do not match expected structure!", allOkay);
+        }));
+
+    // Run the test!
+    try {
+      final JobConfig jobConfig = new JobConfig()
+          .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class);
+      this.testInstance.newJob(this.testPipeline, jobConfig).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally!");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc4/beam/flink/Uc4BeamFlink.java b/theodolite-benchmarks/uc4-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc4/beam/flink/Uc4BeamFlink.java
index 5d398d610a12890e3fb9c85804a4b59a69163b4f..f5f9af3fc14b57476975708a139788e7f0386953 100644
--- a/theodolite-benchmarks/uc4-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc4/beam/flink/Uc4BeamFlink.java
+++ b/theodolite-benchmarks/uc4-beam-flink/src/main/java/rocks/theodolite/benchmarks/uc4/beam/flink/Uc4BeamFlink.java
@@ -15,7 +15,7 @@ public final class Uc4BeamFlink {
    * Start running this microservice.
    */
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), FlinkRunner.class, args).runStandalone();
   }
 
 }
diff --git a/theodolite-benchmarks/uc4-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc4/beam/samza/Uc4BeamSamza.java b/theodolite-benchmarks/uc4-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc4/beam/samza/Uc4BeamSamza.java
index 044b3dc4b647dffa02a62d17c9fcdaf15b0a0869..585e3ff9589c0262c12b6fa33023cd69b58c53f1 100644
--- a/theodolite-benchmarks/uc4-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc4/beam/samza/Uc4BeamSamza.java
+++ b/theodolite-benchmarks/uc4-beam-samza/src/main/java/rocks/theodolite/benchmarks/uc4/beam/samza/Uc4BeamSamza.java
@@ -22,7 +22,7 @@ public final class Uc4BeamSamza {
    * Start running this microservice.
    */
   public static void main(final String[] args) {
-    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).run();
+    new BeamService(PipelineFactory.factory(), SamzaRunner.class, args).runStandalone();
   }
 
 }
diff --git a/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
index a71c24eda5385b10a73b9eb65a83bba8363dd3e7..42d12d82026df0682f771b0cec5c1705ead83b2e 100644
--- a/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
+++ b/theodolite-benchmarks/uc4-beam/src/main/java/rocks/theodolite/benchmarks/uc4/beam/PipelineFactory.java
@@ -223,7 +223,8 @@ public class PipelineFactory extends AbstractPipelineFactory {
   protected void registerCoders(final CoderRegistry registry) {
     registry.registerCoderForClass(
         ActivePowerRecord.class,
-        AvroCoder.of(ActivePowerRecord.class));
+        // AvroCoder.of(ActivePowerRecord.SCHEMA$));
+        AvroCoder.of(ActivePowerRecord.class, false));
     registry.registerCoderForClass(
         AggregatedActivePowerRecord.class,
         new AggregatedActivePowerRecordCoder());
diff --git a/theodolite-benchmarks/uc4-flink/build.gradle b/theodolite-benchmarks/uc4-flink/build.gradle
index c095c4126bfae6556e51596a2d53ade601cf321f..a6d20c2ddeaf8cd614d9fb3818ad8f18ba28c2a9 100644
--- a/theodolite-benchmarks/uc4-flink/build.gradle
+++ b/theodolite-benchmarks/uc4-flink/build.gradle
@@ -2,4 +2,4 @@ plugins {
   id 'theodolite.flink'
 }
 
-mainClassName = "rocks.theodolite.benchmarks.uc2.flink.AggregationServiceFlinkJob"
+mainClassName = "rocks.theodolite.benchmarks.uc4.flink.AggregationServiceFlinkJob"
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..a09c59d007a4de426a5046221662cdf1e912ee56
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc4-hazelcastjet.tar /
+
+
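+# LOG_LEVEL is expected to be provided as a container environment variable and controls the
+# SLF4J simple logger level; additional JVM flags can be passed via JAVA_OPTS.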
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc4-hazelcastjet/bin/uc4-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/build.gradle b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..01daa0b88ffea88ed52e1ca6afa682150ade1b50
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc4.hazelcastjet.HistoryService"
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..419c25fec3eeffbd9eabef4897c44b7c6e773cee
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
@@ -0,0 +1,74 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_CONFIG_TOPIC_DEFAULT = "configuration";
+  private static final String KAFKA_FEEDBACK_TOPIC_DEFAULT = "aggregation-feedback";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+
+  // UC4 specific (default)
+  private static final String WINDOW_SIZE_DEFAULT_MS = "5000";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc4-hazelcastjet";
+
+  /**
+   * Entrypoint for UC4 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc4HistoryService = new HistoryService();
+    try {
+      uc4HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC4 service.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC4 using the Uc4HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc4HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setKafkaConfigurationTopicFromEnv(KAFKA_CONFIG_TOPIC_DEFAULT)
+        .setKafkaFeedbackTopicFromEnv(KAFKA_FEEDBACK_TOPIC_DEFAULT)
+        .setWindowSizeFromEnv(WINDOW_SIZE_DEFAULT_MS)
+        .buildUc4JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .buildUc4Pipeline()
+        .runUc4Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..9b6aa71267150296d8b65268b1922925b7ada796
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
@@ -0,0 +1,389 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import io.confluent.kafka.serializers.KafkaAvroSerializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.EventDeserializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC4
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties and set the input, output, and configuration topic. This can be
+ * done using internal functions of this factory. Outside data only refers to custom values or
+ * default values used in case the data cannot be fetched from the environment.
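+ *
+ * <p>
+ * A typical usage, following the UC4 {@code HistoryService} (the literals below are the default
+ * values used there and only serve as an illustration; {@code logger} is any SLF4J logger):
+ * </p>
+ *
+ * <pre>{@code
+ * new Uc4HazelcastJetFactory()
+ *     .setReadPropertiesFromEnv("localhost:9092", "http://localhost:8081", "uc4-hazelcastjet")
+ *     .setWritePropertiesFromEnv("localhost:9092", "http://localhost:8081")
+ *     .setKafkaInputTopicFromEnv("input")
+ *     .setKafkaOutputTopicFromEnv("output")
+ *     .setKafkaConfigurationTopicFromEnv("configuration")
+ *     .setKafkaFeedbackTopicFromEnv("aggregation-feedback")
+ *     .setWindowSizeFromEnv("5000")
+ *     .buildUc4JetInstanceFromEnv(logger, "localhost:5701", "service-dns")
+ *     .buildUc4Pipeline()
+ *     .runUc4Job("uc4-hazelcastjet");
+ * }</pre>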
+ */
+public class Uc4HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaInputReadPropsForPipeline;
+  private Properties kafkaConfigPropsForPipeline;
+  private Properties kafkaFeedbackPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc4JetInstance;
+  private Pipeline uc4JetPipeline;
+  // UC4 specific
+  private String kafkaConfigurationTopic;
+  private String kafkaFeedbackTopic;
+  private int windowSize;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws IllegalStateException If either no JetInstance or Pipeline is set, a job cannot be
+   *         started.
+   */
+  public void runUc4Job(final String jobName) throws IllegalStateException { // NOPMD
+
+    // Check if a Jet Instance for UC4 is set.
+    if (this.uc4JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC4.");
+    }
+
+    // Check if a Pipeline for UC4 is set.
+    if (this.uc4JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC4.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig()
+        .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+        .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+        .registerSerializer(ImmutableSensorRegistry.class,
+            ImmutableSensorRegistryUc4Serializer.class)
+        .setName(jobName);
+    this.uc4JetInstance.newJobIfAbsent(this.uc4JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc4HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc4HazelcastJetFactory buildUc4JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc4JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline that can be run as a job on a JetInstance. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc4HazelcastJetFactory containing a set pipeline.
+   * @throws IllegalStateException If the input topic or the kafka properties are not defined, the
+   *         pipeline cannot be built.
+   */
+  public Uc4HazelcastJetFactory buildUc4Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaInputReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Input Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Config Read are set.
+    if (this.kafkaConfigPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Config Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Feedback Read are set.
+    if (this.kafkaFeedbackPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Feedback Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka config topic is set.
+    if (this.kafkaConfigurationTopic == null) {
+      throw new IllegalStateException("configuratin topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka feedback topic is set.
+    if (this.kafkaFeedbackTopic == null) {
+      throw new IllegalStateException("Feedback topic not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if window size for tumbling window is set.
+    if (this.windowSize <= 0) {
+      throw new IllegalStateException("window size for pipeline not set or not greater than 0! "
+          + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc4PipelineBuilder pipeBuilder = new Uc4PipelineBuilder();
+    this.uc4JetPipeline =
+        pipeBuilder.build(this.kafkaInputReadPropsForPipeline,
+            this.kafkaConfigPropsForPipeline,
+            this.kafkaFeedbackPropsForPipeline,
+            this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic,
+            this.kafkaConfigurationTopic,
+            this.kafkaFeedbackTopic,
+            this.windowSize);
+    // Return the Uc4HazelcastJetFactory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @param jobName The job name, forwarded to the Kafka read properties builder.
+   * @return The Uc4HazelcastJetFactory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+
+    final Properties kafkaInputReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault, jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    final Properties kafkaConfigReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            EventDeserializer.class.getCanonicalName(),
+            StringDeserializer.class.getCanonicalName());
+
+    final Properties kafkaAggregationReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    this.kafkaInputReadPropsForPipeline = kafkaInputReadProps;
+    this.kafkaConfigPropsForPipeline = kafkaConfigReadProps;
+    this.kafkaFeedbackPropsForPipeline = kafkaAggregationReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema
+   *        registry url can be fetched from the environment.
+   * @return The Uc4HazelcastJetFactory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            KafkaAvroSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc4HazelcastJetFactory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc4HazelcastJetFactory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder.
+   *
+   * @param windowSize the window size to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setCustomWindowSize(// NOPMD
+      final int windowSize) {
+    this.windowSize = windowSize;
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSize the default window size to be used for this pipeline when none is set
+   *        in the environment.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setWindowSizeFromEnv(// NOPMD
+      final String defaultWindowSize) {
+    final String windowSize = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_UC4),
+        defaultWindowSize);
+    final int windowSizeNumber = Integer.parseInt(windowSize);
+    this.windowSize = windowSizeNumber;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder.
+   *
+   * @param kafkaConfigurationTopic the configuration topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set configuration topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaConfigurationTopic(// NOPMD
+      final String kafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = kafkaConfigurationTopic;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaConfigurationTopic the default configuration topic to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaConfigurationTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaConfigurationTopicFromEnv(// NOPMD
+      final String defaultKafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC),
+        defaultKafkaConfigurationTopic);
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder.
+   *
+   * @param kafkaFeedbackTopic the Feedback topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set Feedback topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaFeedbackTopic(// NOPMD
+      final String kafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = kafkaFeedbackTopic;
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaFeedbackTopic the default Feedback topic to be used for this pipeline when
+   *        none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaFeedbackTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaFeedbackTopicFromEnv(// NOPMD
+      final String defaultKafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC),
+        defaultKafkaFeedbackTopic);
+    return this;
+  }
+
+}
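For orientation, here is a minimal sketch (not part of the patch) of how the environment-based setters above might be chained; the default topic names and the use of a no-argument constructor are illustrative assumptions only.

```java
// Illustrative only: chain the env-based setters with hypothetical defaults.
final Uc4HazelcastJetFactory factory = new Uc4HazelcastJetFactory()
    .setKafkaInputTopicFromEnv("input")
    .setKafkaOutputTopicFromEnv("output")
    .setKafkaConfigurationTopicFromEnv("configuration")
    .setKafkaFeedbackTopicFromEnv("aggregation-feedback")
    .setWindowSizeFromEnv("5000");
```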
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..2efb8250c0e1136b34412e4553b2d216c5e24b43
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
@@ -0,0 +1,309 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.function.BiFunctionEx;
+import com.hazelcast.jet.Traverser;
+import com.hazelcast.jet.Traversers;
+import com.hazelcast.jet.Util;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StageWithWindow;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.StreamStageWithKey;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.AggregatedActivePowerRecordAccumulator;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ChildParentsTransformer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+/**
+ * Builder to build a Hazelcast Jet Pipeline for UC4 which can be used for stream processing.
+ */
+@SuppressWarnings("PMD.ExcessiveImports")
+public class Uc4PipelineBuilder {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(Uc4PipelineBuilder.class);
+  private static final String SENSOR_PARENT_MAP_NAME = "SensorParentMap";
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaInputReadPropsForPipeline Properties Object containing the necessary kafka input
+   *        read attributes.
+   * @param kafkaConfigPropsForPipeline Properties Object containing the necessary kafka config read
+   *        attributes.
+   * @param kafkaFeedbackPropsForPipeline Properties Object containing the necessary kafka
+   *        aggregation read attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param kafkaConfigurationTopic The name of the configuration topic used for the pipeline.
+   * @param kafkaFeedbackTopic The name of the feedback topic used for the pipeline.
+   * @param windowSize The window size of the tumbling window used in this pipeline.
+   * @return A Pipeline which can be used in a Hazelcast Jet instance to process data for UC4.
+   */
+  public Pipeline build(final Properties kafkaInputReadPropsForPipeline, // NOPMD
+      final Properties kafkaConfigPropsForPipeline,
+      final Properties kafkaFeedbackPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline,
+      final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final String kafkaConfigurationTopic,
+      final String kafkaFeedbackTopic,
+      final int windowSize) {
+
+    if (LOGGER.isInfoEnabled()) {
+      LOGGER.info("kafkaConfigProps: " + kafkaConfigPropsForPipeline);
+      LOGGER.info("kafkaFeedbackProps: " + kafkaFeedbackPropsForPipeline);
+      LOGGER.info("kafkaWriteProps: " + kafkaWritePropsForPipeline);
+    }
+
+    // The pipeline for this Use Case
+    final Pipeline uc4Pipeline = Pipeline.create();
+
+    // Sources for this use case
+    final StreamSource<Entry<Event, String>> configSource =
+        KafkaSources.kafka(kafkaConfigPropsForPipeline, kafkaConfigurationTopic);
+
+    final StreamSource<Entry<String, ActivePowerRecord>> inputSource =
+        KafkaSources.kafka(kafkaInputReadPropsForPipeline, kafkaInputTopic);
+
+    final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource =
+        KafkaSources.kafka(kafkaFeedbackPropsForPipeline, kafkaFeedbackTopic);
+
+    // Extend UC4 topology to pipeline
+    final StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Aggregation =
+        this.extendUc4Topology(uc4Pipeline, inputSource, aggregationSource, configSource,
+            windowSize);
+
+    // Add Sink 1: Write back to the Kafka feedback/aggregation topic
+    uc4Aggregation.writeTo(KafkaSinks.kafka(
+        kafkaWritePropsForPipeline, kafkaFeedbackTopic));
+
+    // Add Sink 2: Log the aggregation results
+    uc4Aggregation.writeTo(Sinks.logger());
+
+    // Add Sink 3: Write back to the Kafka output topic
+    uc4Aggregation.writeTo(KafkaSinks.kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    // Return the pipeline
+    return uc4Pipeline;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC4 topology defined by Theodolite.
+   *
+   * <p>
+   * UC4 takes {@code ActivePowerRecord} events from sensors and a {@code SensorRegistry} that maps
+   * sensor keys to groups, so that values can be assigned to their corresponding groups. A feedback
+   * stream allows group keys to be mapped to values and, eventually, to further top-level groups
+   * defined by the {@code SensorRegistry}.
+   * </p>
+   *
+   * <p>
+   * 6 Step topology: <br>
+   * (1) Inputs (Config, Values, Aggregations) <br>
+   * (2) Merge Input Values and Aggregations <br>
+   * (3) Join Configuration with Merged Input Stream <br>
+   * (4) Duplicate as flatmap per value and group <br>
+   * (5) Window (preparation for possible last values) <br>
+   * (6) Aggregate data over the window
+   * </p>
+   *
+   * @param pipe The blank pipeline to extend the logic to.
+   * @param inputSource A streaming source with {@code ActivePowerRecord} data.
+   * @param aggregationSource A streaming source with aggregated data.
+   * @param configurationSource A streaming source delivering a {@code SensorRegistry}.
+   * @param windowSize The window size used to aggregate over.
+   * @return A {@code StreamStage<Entry<String, AggregatedActivePowerRecord>>} with sensor keys or
+   *         group keys mapped to their corresponding aggregated values. The data can be further
+   *         modified or directly linked to a Hazelcast Jet sink.
+   */
+  public StreamStage<Entry<String, AggregatedActivePowerRecord>> extendUc4Topology(// NOPMD
+      final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> inputSource,
+      final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource,
+      final StreamSource<Entry<Event, String>> configurationSource, final int windowSize) {
+
+    //////////////////////////////////
+    // (1) Configuration Stream
+    pipe.readFrom(configurationSource)
+        .withNativeTimestamps(0)
+        .filter(entry -> entry.getKey() == Event.SENSOR_REGISTRY_CHANGED
+            || entry.getKey() == Event.SENSOR_REGISTRY_STATUS)
+        .map(data -> Util.entry(data.getKey(), SensorRegistry.fromJson(data.getValue())))
+        .flatMapStateful(HashMap::new, new ConfigFlatMap())
+        .writeTo(Sinks.mapWithUpdating(
+            SENSOR_PARENT_MAP_NAME, // The addressed IMAP
+            Entry::getKey, // The key to look for
+            (oldValue, newEntry) -> newEntry.getValue()));
+
+    //////////////////////////////////
+    // (1) Sensor Input Stream
+    final StreamStage<Entry<String, ActivePowerRecord>> inputStream = pipe
+        .readFrom(inputSource)
+        .withNativeTimestamps(0);
+
+    //////////////////////////////////
+    // (1) Aggregation Stream
+    final StreamStage<Entry<String, ActivePowerRecord>> aggregations = pipe
+        .readFrom(aggregationSource)
+        .withNativeTimestamps(0)
+        .map(entry -> { // Map Aggregated to ActivePowerRecord
+          final AggregatedActivePowerRecord agg = entry.getValue();
+          final ActivePowerRecord record = new ActivePowerRecord(
+              agg.getIdentifier(), agg.getTimestamp(), agg.getSumInW());
+          return Util.entry(entry.getKey(), record);
+        });
+
+    //////////////////////////////////
+    // (2) UC4 Merge Input with aggregation stream
+    final StreamStageWithKey<Entry<String, ActivePowerRecord>, String>
+        mergedInputAndAggregations = inputStream
+        .merge(aggregations)
+        .groupingKey(Entry::getKey);
+
+    //////////////////////////////////
+    // (3) UC4 Join Configuration and Merged Input/Aggregation Stream
+    // [sensorKey , (value,Set<Groups>)]
+    final StreamStage<Entry<String, ValueGroup>> joinedStage = mergedInputAndAggregations
+        .<Set<String>, Entry<String, ValueGroup>>mapUsingIMap(
+            SENSOR_PARENT_MAP_NAME,
+            (sensorEvent, sensorParentsSet) -> {
+              // Check whether a groupset exists for a key or not
+              if (sensorParentsSet == null) {
+                // No group set exists for this key: return valuegroup with default null group set.
+                final Set<String> nullSet = new HashSet<>();
+                nullSet.add("NULL-GROUPSET");
+                return Util.entry(sensorEvent.getKey(),
+                    new ValueGroup(sensorEvent.getValue(), nullSet));
+              } else {
+                // Group set exists for this key: return valuegroup with the groupset.
+                final ValueGroup valueParentsPair =
+                    new ValueGroup(sensorEvent.getValue(), sensorParentsSet);
+                // Return solution
+                return Util.entry(sensorEvent.getKey(), valueParentsPair);
+              }
+            });
+
+    //////////////////////////////////
+    // (4) UC4 Duplicate as flatmap joined Stream
+    // [(sensorKey, Group) , value]
+    final StreamStage<Entry<SensorGroupKey, ActivePowerRecord>> dupliAsFlatmappedStage = joinedStage
+        .flatMap(entry -> {
+
+          // Supplied data
+          final String keyGroupId = entry.getKey();
+          final ActivePowerRecord record = entry.getValue().getRecord();
+          final Set<String> groups = entry.getValue().getGroups();
+
+          // Transformed Data
+          final String[] groupList = groups.toArray(String[]::new);
+          final SensorGroupKey[] newKeyList = new SensorGroupKey[groupList.length];
+          final List<Entry<SensorGroupKey, ActivePowerRecord>> newEntryList = new ArrayList<>();
+          for (int i = 0; i < groupList.length; i++) {
+            newKeyList[i] = new SensorGroupKey(keyGroupId, groupList[i]);
+            newEntryList.add(Util.entry(newKeyList[i], record));
+          }
+
+          // Return traversable list of new entry elements
+          return Traversers.traverseIterable(newEntryList);
+        });
+
+    //////////////////////////////////
+    // (5) UC4 Last Value Map
+    // Tumbling-window view over [(sensorKey, group), record] entries
+    final StageWithWindow<Entry<SensorGroupKey, ActivePowerRecord>>
+        windowedLastValues = dupliAsFlatmappedStage
+        .window(WindowDefinition.tumbling(windowSize));
+
+    final AggregateOperation1<Entry<SensorGroupKey, ActivePowerRecord>,
+        AggregatedActivePowerRecordAccumulator, AggregatedActivePowerRecord> aggrOp =
+        AggregateOperation
+        .withCreate(AggregatedActivePowerRecordAccumulator::new)
+        .<Entry<SensorGroupKey, ActivePowerRecord>>andAccumulate((acc, rec) -> {
+          acc.setId(rec.getKey().getGroup());
+          acc.addInputs(rec.getValue());
+        })
+        .andCombine((acc, acc2) ->
+            acc.addInputs(acc2.getId(), acc2.getSumInW(), acc2.getCount(), acc2.getTimestamp()))
+        .andDeduct((acc, acc2) -> acc.removeInputs(acc2.getSumInW(), acc2.getCount()))
+        .andExportFinish(acc ->
+            new AggregatedActivePowerRecord(acc.getId(),
+                acc.getTimestamp(),
+                acc.getCount(),
+                acc.getSumInW(),
+                acc.getAverageInW())
+        );
+
+    //////////////////////////////////
+    // (6) UC4 Aggregate over the window per group
+
+    return windowedLastValues
+        .groupingKey(entry -> entry.getKey().getGroup())
+        .aggregate(aggrOp).map(agg -> Util.entry(agg.getKey(), agg.getValue()));
+  }
+
+
+
+  /**
+   * FlatMap function used to process the configuration input for UC4.
+   */
+  private static class ConfigFlatMap implements
+      BiFunctionEx<Map<String, Set<String>>, Entry<Event, SensorRegistry>, Traverser<Entry<String, Set<String>>>> { // NOCS
+
+    private static final long serialVersionUID = -6769931374907428699L;
+
+    @Override
+    public Traverser<Entry<String, Set<String>>> applyEx(
+        final Map<String, Set<String>> flatMapStage,
+        final Entry<Event, SensorRegistry> eventItem) {
+      // Transform new Input
+      final ChildParentsTransformer transformer = new ChildParentsTransformer("default-name");
+      final Map<String, Set<String>> mapFromRegistry =
+          transformer.constructChildParentsPairs(eventItem.getValue());
+
+      // Compare both tables
+      final Map<String, Set<String>> updates = new HashMap<>();
+      for (final String key : mapFromRegistry.keySet()) {
+        if (flatMapStage.containsKey(key)) {
+          if (!mapFromRegistry.get(key).equals(flatMapStage.get(key))) {
+            updates.put(key, mapFromRegistry.get(key));
+          }
+        } else {
+          updates.put(key, mapFromRegistry.get(key));
+        }
+      }
+
+      // Create an updates list to pass on to the next pipeline stage.
+      final List<Entry<String, Set<String>>> updatesList = new ArrayList<>(updates.entrySet());
+
+      // Return traverser with updates list.
+      return Traversers.traverseIterable(updatesList)
+          .map(e -> Util.entry(e.getKey(), e.getValue()));
+    }
+
+  }
+
+}
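As a quick reference, a hedged sketch of how the builder's build() method could be invoked; the property contents and topic names below are placeholders, not values taken from the benchmark configuration.

```java
// Sketch only: the real Kafka properties are assembled by the surrounding factory/service classes.
final Properties readProps = new Properties();
readProps.put("bootstrap.servers", "localhost:9092"); // assumed broker address

final Pipeline pipeline = new Uc4PipelineBuilder().build(
    readProps,        // Kafka input read properties
    readProps,        // Kafka configuration read properties
    readProps,        // Kafka feedback read properties
    new Properties(), // Kafka write properties
    "input", "output", "configuration", "aggregation-feedback",
    5_000);           // tumbling window size (same unit as the event timestamps)
```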
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
new file mode 100644
index 0000000000000000000000000000000000000000..3166f16cd31bf0e6d4dff6548468791e7a5e5c5c
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
@@ -0,0 +1,100 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Accumulator class for AggregatedActivePowerRecords.
+ */
+public class AggregatedActivePowerRecordAccumulator {
+
+  private String id;
+  private long timestamp;
+  private long count;
+  private double sumInW;
+  private double averageInW;
+
+  /**
+   * Default constructor.
+   */
+  public AggregatedActivePowerRecordAccumulator() {
+    // This constructor is intentionally empty. Nothing special is needed here.
+  }
+
+
+  /**
+   * Creates an accumulator with the given initial values.
+   */
+  public AggregatedActivePowerRecordAccumulator(final String id,
+                                                final long timestamp,
+                                                final long count,
+                                                final double sumInW,
+                                                final double averageInW) {
+    this.id = id;
+    this.timestamp = timestamp;
+    this.count = count;
+    this.sumInW = sumInW;
+    this.averageInW = averageInW;
+  }
+
+  /**
+   * Sets the id.
+   */
+  public void setId(final String id) {
+    this.id = id;
+  }
+
+  /**
+   * Adds the record to the aggregation.
+   */
+  public void addInputs(final ActivePowerRecord record) {
+    this.count += 1;
+    this.sumInW += record.getValueInW();
+    this.timestamp = record.getTimestamp();
+    this.averageInW = sumInW / count;
+  }
+
+  /**
+   * Adds the records from another aggregator.
+   */
+  public void addInputs(final String id,
+                        final double sumInW,
+                        final long count,
+                        final long timestamp) {
+    this.id = this.id == null ? id : this.id;
+    this.sumInW += sumInW;
+    this.count += count;
+    this.timestamp = Math.max(this.timestamp, timestamp);
+    this.averageInW = this.sumInW / this.count;
+  }
+
+  /**
+   * Removes the values of another aggregator.
+   * Not a complete reset, since the previous timestamp cannot be restored; it is set to -1 instead.
+   */
+  public void removeInputs(final double sumInW, final long count) {
+    this.sumInW -= sumInW;
+    this.count -= count;
+    this.averageInW = this.count == 0 ? 0.0 : this.sumInW / this.count;
+    this.timestamp = -1L;
+  }
+
+  public long getCount() {
+    return count;
+  }
+
+  public double getSumInW() {
+    return sumInW;
+  }
+
+  public double getAverageInW() {
+    return averageInW;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public long getTimestamp() {
+    return timestamp;
+  }
+}
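To illustrate how the accumulate and combine operations above interact, a small sketch using the ActivePowerRecord constructor as it is used elsewhere in this benchmark; the sensor and group names are made up for the example.

```java
// Two partial aggregates for the same group, then combined as in the pipeline's andCombine step.
final AggregatedActivePowerRecordAccumulator left = new AggregatedActivePowerRecordAccumulator();
left.setId("group-a");
left.addInputs(new ActivePowerRecord("sensor-1", 1000L, 10.0));

final AggregatedActivePowerRecordAccumulator right = new AggregatedActivePowerRecordAccumulator();
right.setId("group-a");
right.addInputs(new ActivePowerRecord("sensor-2", 2000L, 30.0));

left.addInputs(right.getId(), right.getSumInW(), right.getCount(), right.getTimestamp());
// left now holds: count == 2, sumInW == 40.0, averageInW == 20.0, timestamp == 2000 (the newer one)
```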
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
new file mode 100644
index 0000000000000000000000000000000000000000..ad3b2294cb934ba04b07df2e2b2d3dbdd6e1a905
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
@@ -0,0 +1,118 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.kstream.Transformer;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.KeyValueStore;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.sensorregistry.AggregatedSensor;
+import titan.ccp.model.sensorregistry.Sensor;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+/**
+ * Transforms a {@link SensorRegistry} into key-value pairs of sensor identifiers and their parents'
+ * sensor identifiers. All pairs whose sensor's parents have changed since the last iteration are
+ * forwarded. A mapping of an identifier to <code>null</code> means that the corresponding sensor
+ * no longer exists in the sensor registry.
+ *
+ */
+public class ChildParentsTransformer implements
+    Transformer<Event, SensorRegistry, Iterable<KeyValue<String, Optional<Set<String>>>>> {
+
+  private final String stateStoreName;
+  // private ProcessorContext context;
+  private KeyValueStore<String, Set<String>> state;
+
+  public ChildParentsTransformer(final String stateStoreName) {
+    this.stateStoreName = stateStoreName;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public void init(final ProcessorContext context) {
+    // this.context = context;
+    this.state = (KeyValueStore<String, Set<String>>) context.getStateStore(this.stateStoreName);
+  }
+
+  @Override
+  public Iterable<KeyValue<String, Optional<Set<String>>>> transform(final Event event,
+      final SensorRegistry registry) {
+
+    // Values may later be null for deleting a sensor
+    final Map<String, Set<String>> childParentsPairs = this.constructChildParentsPairs(registry);
+
+    this.updateChildParentsPairs(childParentsPairs);
+
+    this.updateState(childParentsPairs);
+
+    return childParentsPairs
+        .entrySet()
+        .stream()
+        .map(e -> KeyValue.pair(e.getKey(), Optional.ofNullable(e.getValue())))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public void close() {
+    // Do nothing
+  }
+
+  /**
+   * Constructs a map of keys to their set of parents out of a SensorRegistry.
+   *
+   * @param registry The SensorRegistry to build the map out of.
+   * @return A map of keys to a set of their parents.
+   */
+  public Map<String, Set<String>> constructChildParentsPairs(final SensorRegistry registry) {
+    return this.streamAllChildren(registry.getTopLevelSensor())
+        .collect(Collectors.toMap(
+            Sensor::getIdentifier,
+            child -> child.getParent()
+                .map(p -> Set.of(p.getIdentifier()))
+                .orElseGet(Set::of)));
+  }
+
+  private Stream<Sensor> streamAllChildren(final AggregatedSensor sensor) {
+    return sensor.getChildren().stream()
+        .flatMap(s -> Stream.concat(
+            Stream.of(s),
+            s instanceof AggregatedSensor ? this.streamAllChildren((AggregatedSensor) s)
+                : Stream.empty()));
+  }
+
+  private void updateChildParentsPairs(final Map<String, Set<String>> childParentsPairs) {
+    final KeyValueIterator<String, Set<String>> oldChildParentsPairs = this.state.all();
+    while (oldChildParentsPairs.hasNext()) {
+      final KeyValue<String, Set<String>> oldChildParentPair = oldChildParentsPairs.next();
+      final String identifier = oldChildParentPair.key;
+      final Set<String> oldParents = oldChildParentPair.value;
+      final Set<String> newParents = childParentsPairs.get(identifier); // null if not exists
+      if (newParents == null) {
+        // Sensor was deleted
+        childParentsPairs.put(identifier, null);
+      } else if (newParents.equals(oldParents)) {
+        // No changes
+        childParentsPairs.remove(identifier);
+      }
+      // Else: parents changed; keep the new mapping (marking changed parents could be added later)
+    }
+    oldChildParentsPairs.close();
+  }
+
+  private void updateState(final Map<String, Set<String>> childParentsPairs) {
+    for (final Map.Entry<String, Set<String>> childParentPair : childParentsPairs.entrySet()) {
+      if (childParentPair.getValue() == null) {
+        this.state.delete(childParentPair.getKey());
+      } else {
+        this.state.put(childParentPair.getKey(), childParentPair.getValue());
+      }
+    }
+  }
+
+}
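A brief sketch of the child-to-parents mapping this transformer produces, built with the same registry API the pipeline test uses; the store name is an arbitrary placeholder because constructChildParentsPairs does not touch the state store.

```java
// Registry layout: level2Group -> level1Group -> sensor (as in the pipeline test).
final MutableSensorRegistry mutableRegistry = new MutableSensorRegistry("level2Group");
final MutableAggregatedSensor level1 =
    mutableRegistry.getTopLevelSensor().addChildAggregatedSensor("level1Group");
level1.addChildMachineSensor("sensor");
final SensorRegistry registry = SensorRegistry.fromJson(mutableRegistry.toJson());

final Map<String, Set<String>> childParents =
    new ChildParentsTransformer("dummy-store").constructChildParentsPairs(registry);
// Expected mapping: {"sensor" -> {"level1Group"}, "level1Group" -> {"level2Group"}}
```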
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8d06b497009944b9a9a0fda4ab224e5fe992e3d
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
@@ -0,0 +1,32 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Map;
+import org.apache.kafka.common.serialization.Deserializer;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.configuration.events.EventSerde;
+
+/**
+ * Deserializer for Event Objects.
+ *
+ */
+public class EventDeserializer implements Deserializer<Event> {
+
+  private final Deserializer<Event> deserializer = EventSerde.serde().deserializer();
+
+  @Override
+  public void configure(final Map<String, ?> configs, final boolean isKey) {
+    this.deserializer.configure(configs, isKey);
+  }
+
+  @Override
+  public Event deserialize(final String topic, final byte[] data) {
+    return this.deserializer.deserialize(topic, data);
+  }
+
+  @Override
+  public void close() {
+    this.deserializer.close();
+  }
+
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
new file mode 100644
index 0000000000000000000000000000000000000000..ec240bf8cb925aa3a444b56457da5adc411212b2
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
@@ -0,0 +1,26 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.function.SupplierEx;
+import java.util.HashMap;
+import java.util.Set;
+
+/**
+ * Supplies a {@link HashMap} and implements {@link SupplierEx}.
+ */
+public class HashMapSupplier implements SupplierEx<HashMap<String, Set<String>>> {
+
+  private static final long serialVersionUID = -6247504592403610702L; // NOPMD
+
+  @Override
+  public HashMap<String, Set<String>> get() {
+    return new HashMap<>();
+  }
+
+  @Override
+  public HashMap<String, Set<String>> getEx() throws Exception {
+    return this.get();
+  }
+
+
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..53d22f7f156891cf11e5b8915eed17b74c3d57fb
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
@@ -0,0 +1,36 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+
+/**
+ * {@link StreamSerializer} for Hazelcast Jet to serialize and deserialize an
+ * {@link ImmutableSensorRegistry}.
+ */
+public class ImmutableSensorRegistryUc4Serializer
+    implements StreamSerializer<ImmutableSensorRegistry> {
+
+  private static final int TYPE_ID = 3;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final ImmutableSensorRegistry object)
+      throws IOException {
+    final String sensorRegistryJson = object.toJson();
+    out.writeString(sensorRegistryJson);
+  }
+
+  @Override
+  public ImmutableSensorRegistry read(final ObjectDataInput in) throws IOException {
+    final String sensorRegistryJson = in.readString();
+    return (ImmutableSensorRegistry) ImmutableSensorRegistry.fromJson(sensorRegistryJson);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..24114cc90a709c99e74495714559c12324e07788
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Objects;
+
+/**
+ * Structure (sensorId, group).
+ */
+public class SensorGroupKey {
+
+  private final String sensorId;
+  private final String group;
+
+  public SensorGroupKey(final String sensorId, final String group) {
+    this.sensorId = sensorId;
+    this.group = group;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  public String getGroup() {
+    return this.group;
+  }
+
+  @Override
+  public String toString() {
+    return "[SensorId: " + this.sensorId + "; Group: " + this.group + "]";
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.sensorId, this.group);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof SensorGroupKey) {
+      final SensorGroupKey other = (SensorGroupKey) obj;
+      return Objects.equals(this.sensorId, other.sensorId)
+          && Objects.equals(this.group, other.group);
+    }
+    return false;
+  }
+
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..12a46b9d8f91ea145f614654a6ce9813b9014290
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
@@ -0,0 +1,31 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * Serializes and Deserializes a SensorGroupKey.
+ */
+public class SensorGroupKeySerializer implements StreamSerializer<SensorGroupKey> {
+
+  private static final int TYPE_ID = 2;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final SensorGroupKey key) throws IOException {
+    out.writeString(key.getSensorId());
+    out.writeString(key.getGroup());
+  }
+
+  @Override
+  public SensorGroupKey read(final ObjectDataInput in) throws IOException {
+    return new SensorGroupKey(in.readString(), in.readString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
new file mode 100644
index 0000000000000000000000000000000000000000..893efcf74fe8a16202d795fca5cc43b63190dc50
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
@@ -0,0 +1,59 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Objects;
+import java.util.Set;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Structure pairing an {@code ActivePowerRecord} with the set of groups it belongs to: (record, Set(groups)).
+ */
+public class ValueGroup {
+
+  private final ActivePowerRecord record;
+  private final Set<String> groups;
+
+  public ValueGroup(final ActivePowerRecord record, final Set<String> groups) {
+    this.record = record;
+    this.groups = groups;
+  }
+
+  public ActivePowerRecord getRecord() {
+    return this.record;
+  }
+
+  public Double getValueInW() {
+    return this.record.getValueInW();
+  }
+
+  public Set<String> getGroups() {
+    return this.groups;
+  }
+
+  @Override
+  public String toString() {
+    String groupString = "[";
+    for (final String group : this.groups) {
+      groupString = groupString + group + "/";// NOPMD
+    }
+    return this.record.getValueInW() + ";" + groupString + "]";
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.record, this.groups);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof ValueGroup) {
+      final ValueGroup other = (ValueGroup) obj;
+      return Objects.equals(this.record.getValueInW(), other.getValueInW())
+          && this.groups.containsAll(other.groups);
+    }
+    return false;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..e136d1da0cd8362fed4f76807e7f8725c2075b7f
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
@@ -0,0 +1,33 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+
+/** A pipeline serializer for {@link ValueGroup} objects to allow for parallelization. */
+public class ValueGroupSerializer implements StreamSerializer<ValueGroup> {
+
+  private static final int TYPE_ID = 1;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final ValueGroup key) throws IOException {
+    // Write the wrapped record instead of the ValueGroup itself to avoid re-entering this serializer.
+    out.writeObject(key.getRecord());
+    out.writeString(String.join(",", key.getGroups()));
+  }
+
+  @Override
+  public ValueGroup read(final ObjectDataInput in) throws IOException {
+    return new ValueGroup(in.readObject(),
+        new HashSet<>(Arrays.asList(in.readString().split(","))));
+  }
+
+}
+
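Before a UC4 job is submitted, the custom types above are registered with the Jet JobConfig; the sketch below mirrors the registration done in the pipeline test further down.

```java
// Register the UC4-specific serializers with the job configuration.
final JobConfig jobConfig = new JobConfig()
    .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
    .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
    .registerSerializer(ImmutableSensorRegistry.class, ImmutableSensorRegistryUc4Serializer.class);
// jetInstance.newJob(pipeline, jobConfig).join();
```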
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc4-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..b74c2874b92a51b138ffe8b44b1cf750dfce5880
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
@@ -0,0 +1,226 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.Uc4PipelineBuilder;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+import titan.ccp.model.sensorregistry.MachineSensor;
+import titan.ccp.model.sensorregistry.MutableAggregatedSensor;
+import titan.ccp.model.sensorregistry.MutableSensorRegistry;
+
+@Category(SerialTest.class)
+public class Uc4PipelineTest extends JetTestSupport {
+
+  // Test machinery
+  JetInstance testInstance = null;
+  Pipeline testPipeline = null;
+  StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Topology = null;
+
+  @Before
+  public void buildUc4Pipeline() {
+
+    // Setup Configuration
+    final int testItemsPerSecond = 2;
+    final String testSensorName = "TEST-SENSOR";
+    final String testLevel1GroupName = "TEST-LEVEL1-GROUP";
+    final String testLevel2GroupName = "TEST-LEVEL2-GROUP";
+    final Double testValueInW = 10.0;
+    final int testWindowSize = 5000; // Irrelevant here, as the window size is currently not applied correctly.
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create test source 1 : Input Values
+    final StreamSource<Entry<String, ActivePowerRecord>> testInputSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create test source 2 : Mock aggregation values
+    final StreamSource<Entry<String, AggregatedActivePowerRecord>> testAggregationSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final AggregatedActivePowerRecord testAggRecord =
+              new AggregatedActivePowerRecord(testSensorName,
+                  System.currentTimeMillis(),
+                  1L,
+                  testValueInW,
+                  testValueInW);
+          return Map.entry(testLevel1GroupName, testAggRecord);
+        });
+
+
+    // Create test source 3 : Mock Config Values
+    final StreamSource<Entry<Event, String>> testConfigSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final Event theEvent = Event.SENSOR_REGISTRY_CHANGED;
+
+          // Topology:
+          // level2Group -> level1Group -> testSensor
+          
+          // Create Registry
+          final MutableSensorRegistry testRegistry = new MutableSensorRegistry(testLevel2GroupName);
+          // Add Sensors
+          final MutableAggregatedSensor topLevelSensor = testRegistry.getTopLevelSensor();
+          final MutableAggregatedSensor level1GroupSensor =
+              topLevelSensor.addChildAggregatedSensor(testLevel1GroupName);
+          final MachineSensor inputSensor = level1GroupSensor.addChildMachineSensor(testSensorName);
+
+          final String stringRegistry = testRegistry.toJson();
+          final Entry<Event, String> testEntry =
+              Map.entry(theEvent, stringRegistry);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    final Uc4PipelineBuilder pipelineBuilder = new Uc4PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc4Topology = pipelineBuilder.extendUc4Topology(testPipeline,
+        testInputSource, testAggregationSource, testConfigSource, testWindowSize);
+
+    this.uc4Topology.writeTo(Sinks.logger());
+  }
+
+  /**
+   * Tests that the aggregation results eventually contain both group levels and that the
+   * computed averages are consistent.
+   */
+  @Test
+  public void testOutput() {
+
+    // Assertion Configuration
+    final int timeout = 20;
+    final String testSensorName = "TEST-SENSOR";
+    final String testLevel1GroupName = "TEST-LEVEL1-GROUP";
+    final String testLevel2GroupName = "TEST-LEVEL2-GROUP";
+    final double testValueInW = 10.0;
+
+
+    // Assertion
+    this.uc4Topology.apply(Assertions.assertCollectedEventually(timeout, 
+        collection -> {
+          System.out.println("DEBUG || ENTERED ASSERTION COLLECTED EVENTUALLY");
+
+          boolean allOkay = false;
+
+          boolean testLevel1contained = false;
+          boolean testLevel2contained = false;
+          boolean averageEqTest = true;
+          boolean avOk = true;
+
+
+          if (collection != null) {
+            System.out.println("Collection size: " + collection.size());
+
+
+            for (final Entry<String, AggregatedActivePowerRecord> entry : collection) {
+              System.out.println("DEBUG || " + entry.toString());
+
+              final String key = entry.getKey();
+              final AggregatedActivePowerRecord agg = entry.getValue();
+
+
+              if (Objects.equals(key, testLevel1GroupName)) {
+                testLevel1contained = true;
+              }
+
+              if (Objects.equals(key, testLevel2GroupName)) {
+                testLevel2contained = true;
+              }
+
+              if (testValueInW != agg.getAverageInW()) {
+                averageEqTest = false;
+              }
+
+              final double average = agg.getSumInW() / agg.getCount();
+              if (average != agg.getAverageInW()) {
+                avOk = false;
+              }
+
+            }
+            allOkay = testLevel1contained && testLevel2contained && averageEqTest && avOk;
+          }
+
+          System.out.println("testLevel1contained: " + testLevel1contained);
+          System.out.println("testLevel2contained: " + testLevel2contained);
+          System.out.println("averageEqTest: " + averageEqTest);
+          System.out.println("avOk: " + avOk);
+
+          Assert.assertTrue("Assertion did not complete!", allOkay);
+          
+        }));
+
+    try {
+
+      final JobConfig jobConfig = new JobConfig()
+          .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+          .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+          .registerSerializer(ImmutableSensorRegistry.class,
+              ImmutableSensorRegistryUc4Serializer.class);
+      this.testInstance.newJob(this.testPipeline, jobConfig).join();
+
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    } catch (final Exception e) {
+      Assert.fail("Job completed with an unexpected exception: " + e);
+    }
+  }
+
+
+  @After
+  public void after() {
+    System.out.println("Shutting down");
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite/README.md b/theodolite/README.md
index f662329f7eda3a39632581b7125a2f2f2feced8a..49019813c43e0b19e32e35703ca294b2b5c54cb0 100644
--- a/theodolite/README.md
+++ b/theodolite/README.md
@@ -51,7 +51,7 @@ Or, if you don't have GraalVM installed, you can run the native executable build
 ```
 
 You can then execute your native executable with:
-```./build/theodolite-0.7.0-SNAPSHOT-runner```
+```./build/theodolite-0.8.0-SNAPSHOT-runner```
 
 If you want to learn more about building native executables, please consult https://quarkus.io/guides/gradle-tooling.
 
diff --git a/theodolite/build.gradle b/theodolite/build.gradle
index 521137d7315de193f26fdf2307155e587c0dd921..7e10245b8605ba926ac171e260bc145378a0d8d8 100644
--- a/theodolite/build.gradle
+++ b/theodolite/build.gradle
@@ -37,7 +37,7 @@ dependencies {
 }
 
 group 'theodolite'
-version '0.7.0-SNAPSHOT'
+version '0.8.0-SNAPSHOT'
 
 java {
     sourceCompatibility = JavaVersion.VERSION_11
diff --git a/theodolite/crd/crd-benchmark.yaml b/theodolite/crd/crd-benchmark.yaml
index c901e61360c05b2f1cf2b1767a20f624eb262231..d2418ee005e2c0168254a9423b9c383ace2d3ca7 100644
--- a/theodolite/crd/crd-benchmark.yaml
+++ b/theodolite/crd/crd-benchmark.yaml
@@ -26,6 +26,10 @@ spec:
                 description: This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.
                 type: string
                 default: ""
+              waitForResourcesEnabled:
+                description: If true, Theodolite waits until the infrastructure resources are ready before creating the SUT resources and, analogously, until the SUT resources are ready before creating the load generator resources.
+                type: boolean
+                default: false
               infrastructure:
                 description: (Optional) A list of file names that reference Kubernetes resources that are deployed on the cluster to create the required infrastructure.
                 type: object
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt b/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt
index 35efebdc0fb2a3748660cb76cdd5499b4ca5f622..8bd16d04d6a5e5ef3f362ff7d5611bf73e367a7e 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/Action.kt
@@ -23,7 +23,7 @@ class Action {
                 timeout = exec.timeoutSeconds,
                 command = exec.command
         )
-            if(exitCode != 0){
+        if (exitCode != 0) {
             throw ActionCommandFailedException("Error while executing action, finished with exit code $exitCode")
         }
     }
@@ -38,7 +38,7 @@ class ActionSelector {
 @JsonDeserialize
 @RegisterForReflection
 class PodSelector {
-    lateinit var matchLabels: MutableMap<String, String>
+    lateinit var matchLabels: Map<String, String>
 }
 @JsonDeserialize
 @RegisterForReflection
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt b/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt
index a4345c43ac6a75667c3c3e85c8534697193e1458..9f0578f7d1456d823a29049daae6dbe886c95e2a 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/ActionCommand.kt
@@ -33,7 +33,7 @@ class ActionCommand(val client: NamespacedKubernetesClient) {
      * @return the exit code of this executed command
      */
     fun exec(
-        matchLabels: MutableMap<String, String>,
+        matchLabels: Map<String, String>,
         command: Array<String>,
         timeout: Long = Configuration.TIMEOUT_SECONDS,
         container: String = ""
@@ -58,7 +58,7 @@ class ActionCommand(val client: NamespacedKubernetesClient) {
 
             val latchTerminationStatus = execLatch.await(timeout, TimeUnit.SECONDS)
             if (!latchTerminationStatus) {
-                throw ActionCommandFailedException("Latch could not terminate within specified time")
+                throw ActionCommandFailedException("Timeout while running action command")
             }
             execWatch.close()
         } catch (e: Exception) {
@@ -112,7 +112,7 @@ class ActionCommand(val client: NamespacedKubernetesClient) {
      * it can take a while until the status is ready and the pod can be selected.
      * @return the name of the pod or throws [ActionCommandFailedException]
      */
-    fun getPodName(matchLabels: MutableMap<String, String>, tries: Int): String {
+    fun getPodName(matchLabels: Map<String, String>, tries: Int): String {
         for (i in 1..tries) {
 
             try {
@@ -125,7 +125,7 @@ class ActionCommand(val client: NamespacedKubernetesClient) {
         throw ActionCommandFailedException("Couldn't find any pod that matches the specified labels.")
     }
 
-    private fun getPodName(matchLabels: MutableMap<String, String>): String {
+    private fun getPodName(matchLabels: Map<String, String>): String {
         return try {
             val podNames = this.client
                 .pods()
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
index c0db521d1e80f0a6b284dac5f422fcd22dad9752..599f723621db2b86c9163af9351ac896f82b2b86 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
@@ -8,8 +8,10 @@ import io.fabric8.kubernetes.client.NamespacedKubernetesClient
 import io.quarkus.runtime.annotations.RegisterForReflection
 import mu.KotlinLogging
 import theodolite.k8s.K8sManager
+import theodolite.patcher.PatchHandler
 import theodolite.patcher.PatcherFactory
 import theodolite.util.*
+import kotlin.properties.Delegates
 
 
 private val logger = KotlinLogging.logger {}
@@ -21,13 +23,13 @@ private var DEFAULT_THEODOLITE_APP_RESOURCES = "./benchmark-resources"
  * Represents a benchmark in Kubernetes. An example for this is the BenchmarkType.yaml
  * Contains a of:
  * - [name] of the benchmark,
- * - [appResource] list of the resources that have to be deployed for the benchmark,
- * - [loadGenResource] resource that generates the load,
+ * - [infrastructure] resources that have to be deployed for the benchmark infrastructure
+ * - [sut] list of the resources that have to be deployed for the benchmark,
+ * - [loadGenerator] resource that generates the load,
  * - [resourceTypes] types of scaling resources,
  * - [loadTypes] types of loads that can be scaled for the benchmark,
  * - [kafkaConfig] for the [theodolite.k8s.TopicManager],
  * - [namespace] for the client,
- * - [path] under which the resource yamls can be found.
  *
  *  This class is used for the parsing(in the [theodolite.execution.TheodoliteStandalone]) and
  *  for the deserializing in the [theodolite.execution.operator.TheodoliteOperator].
@@ -37,6 +39,7 @@ private var DEFAULT_THEODOLITE_APP_RESOURCES = "./benchmark-resources"
 @RegisterForReflection
 class KubernetesBenchmark : KubernetesResource, Benchmark {
     lateinit var name: String
+    var waitForResourcesEnabled = false
     lateinit var resourceTypes: List<TypeName>
     lateinit var loadTypes: List<TypeName>
     var kafkaConfig: KafkaConfig? = null
@@ -64,14 +67,13 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
 
     override fun setupInfrastructure() {
         this.infrastructure.beforeActions.forEach { it.exec(client = client) }
-        val kubernetesManager = K8sManager(this.client)
-        loadResources(this.infrastructure.resources)
-            .map { it.second }
-            .forEach { kubernetesManager.deploy(it) }
+        RolloutManager(waitForResourcesEnabled, this.client)
+            .rollout(loadResources(this.infrastructure.resources).map { it.second })
     }
 
     override fun teardownInfrastructure() {
         val kubernetesManager = K8sManager(this.client)
+
         loadResources(this.infrastructure.resources)
             .map { it.second }
             .forEach { kubernetesManager.remove(it) }
@@ -89,33 +91,39 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
      * @return a [BenchmarkDeployment]
      */
     override fun buildDeployment(
-            load: Int,
-            loadPatcherDefinitions: List<PatcherDefinition>,
-            resource: Int,
-            resourcePatcherDefinitions: List<PatcherDefinition>,
-            configurationOverrides: List<ConfigurationOverride?>,
-            loadGenerationDelay: Long,
-            afterTeardownDelay: Long
+        load: Int,
+        loadPatcherDefinitions: List<PatcherDefinition>,
+        resource: Int,
+        resourcePatcherDefinitions: List<PatcherDefinition>,
+        configurationOverrides: List<ConfigurationOverride?>,
+        loadGenerationDelay: Long,
+        afterTeardownDelay: Long
     ): BenchmarkDeployment {
         logger.info { "Using $namespace as namespace." }
 
-        val appResources = loadResources(this.sut.resources)
-        val loadGenResources = loadResources(this.loadGenerator.resources)
 
-        val patcherFactory = PatcherFactory()
+        val appResources = loadResources(this.sut.resources).toResourceMap()
+        val loadGenResources = loadResources(this.loadGenerator.resources).toResourceMap()
 
         // patch the load dimension the resources
         loadPatcherDefinitions.forEach { patcherDefinition ->
-            patcherFactory.createPatcher(patcherDefinition, loadGenResources).patch(load.toString())
+            loadGenResources[patcherDefinition.resource] =
+                PatchHandler.patchResource(loadGenResources, patcherDefinition, load.toString())
         }
         resourcePatcherDefinitions.forEach { patcherDefinition ->
-            patcherFactory.createPatcher(patcherDefinition, appResources).patch(resource.toString())
+            appResources[patcherDefinition.resource] =
+                PatchHandler.patchResource(appResources, patcherDefinition, resource.toString())
         }
 
-        // Patch the given overrides
         configurationOverrides.forEach { override ->
             override?.let {
-                patcherFactory.createPatcher(it.patcher, appResources + loadGenResources).patch(override.value)
+                if (appResources.keys.contains(it.patcher.resource)) {
+                    appResources[it.patcher.resource] =
+                        PatchHandler.patchResource(appResources, override.patcher, override.value)
+                } else {
+                    loadGenResources[it.patcher.resource] =
+                        PatchHandler.patchResource(loadGenResources, override.patcher, override.value)
+                }
             }
         }
 
@@ -126,13 +134,15 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
             sutAfterActions = sut.afterActions,
             loadGenBeforeActions = loadGenerator.beforeActions,
             loadGenAfterActions = loadGenerator.afterActions,
-            appResources = appResources.map { it.second },
-            loadGenResources = loadGenResources.map { it.second },
+            appResources = appResources.toList().flatMap { it.second },
+            loadGenResources = loadGenResources.toList().flatMap { it.second },
             loadGenerationDelay = loadGenerationDelay,
             afterTeardownDelay = afterTeardownDelay,
             kafkaConfig = if (kafkaConfig != null) mapOf("bootstrap.servers" to kafkaConfig.bootstrapServer) else mapOf(),
             topics = kafkaConfig?.topics ?: listOf(),
-            client = this.client
+            client = this.client,
+            rolloutMode = waitForResourcesEnabled
+
         )
     }
 
@@ -145,3 +155,11 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
         this.client = client
     }
 }
+
+private fun Collection<Pair<String, HasMetadata>>.toResourceMap(): MutableMap<String, List<HasMetadata>> {
+    return this.associate { it.first to listOf(it.second) }
+        .toMutableMap()
+}
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
index b30032c524b1e421301e0e9d1ffe83772b43d900..1d7b22233c084625cf16ca7194c76c14601bbaad 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
@@ -28,6 +28,7 @@ class KubernetesBenchmarkDeployment(
     private val sutAfterActions: List<Action>,
     private val loadGenBeforeActions: List<Action>,
     private val loadGenAfterActions: List<Action>,
+    private val rolloutMode: Boolean,
     val appResources: List<HasMetadata>,
     val loadGenResources: List<HasMetadata>,
     private val loadGenerationDelay: Long,
@@ -47,19 +48,20 @@ class KubernetesBenchmarkDeployment(
      *  - Deploy the needed resources.
      */
     override fun setup() {
+        val rolloutManager = RolloutManager(rolloutMode, client)
         if (this.topics.isNotEmpty()) {
             val kafkaTopics = this.topics
                 .filter { !it.removeOnly }
                 .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
             kafkaController.createTopics(kafkaTopics)
         }
+
         sutBeforeActions.forEach { it.exec(client = client) }
-        appResources.forEach { kubernetesManager.deploy(it) }
+        rolloutManager.rollout(appResources)
         logger.info { "Wait ${this.loadGenerationDelay} seconds before starting the load generator." }
         Thread.sleep(Duration.ofSeconds(this.loadGenerationDelay).toMillis())
         loadGenBeforeActions.forEach { it.exec(client = client) }
-        loadGenResources.forEach { kubernetesManager.deploy(it) }
-
+        rolloutManager.rollout(loadGenResources)
     }
 
     /**
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/ResourceSets.kt b/theodolite/src/main/kotlin/theodolite/benchmark/ResourceSets.kt
index 0626a6e24369348d50b60fbb555665c58dd17281..2ee8d8cf5c0e8590728bc253fd452fe8aa1d9d96 100644
--- a/theodolite/src/main/kotlin/theodolite/benchmark/ResourceSets.kt
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/ResourceSets.kt
@@ -11,7 +11,7 @@ import theodolite.util.DeploymentFailedException
 
 @JsonDeserialize
 @RegisterForReflection
-class ResourceSets: KubernetesResource {
+class ResourceSets : KubernetesResource {
     @JsonProperty("configMap")
     @JsonInclude(JsonInclude.Include.NON_NULL)
     var configMap: ConfigMapResourceSet? = null
@@ -21,13 +21,13 @@ class ResourceSets: KubernetesResource {
     var fileSystem: FileSystemResourceSet? = null
 
     fun loadResourceSet(client: NamespacedKubernetesClient): Collection<Pair<String, HasMetadata>> {
-        // TODO Find out whether field access (::configMap) is really what we want to do here (see #362)
-        return if (::configMap != null) {
-                configMap?.getResourceSet(client= client) !!
-            } else if (::fileSystem != null) {
-                fileSystem?.getResourceSet(client= client ) !!
-            } else {
-                throw DeploymentFailedException("Could not load resourceSet.")
-            }
+        return if (this.configMap != null) {
+            configMap?.getResourceSet(client = client)!!
+        } else if (this.fileSystem != null) {
+            fileSystem?.getResourceSet(client = client)!!
+        } else {
+            throw DeploymentFailedException("Could not load resourceSet.")
+        }
     }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/benchmark/RolloutManager.kt b/theodolite/src/main/kotlin/theodolite/benchmark/RolloutManager.kt
new file mode 100644
index 0000000000000000000000000000000000000000..f282fb27971218754a0e35801342efc83837b64b
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/benchmark/RolloutManager.kt
@@ -0,0 +1,42 @@
+package theodolite.benchmark
+
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.api.model.Pod
+import io.fabric8.kubernetes.api.model.apps.DaemonSet
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.ReplicaSet
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+import io.fabric8.kubernetes.api.model.batch.v1.Job
+import io.fabric8.kubernetes.client.NamespacedKubernetesClient
+import theodolite.k8s.K8sManager
+
+private const val SLEEP_TIME_MS = 500L
+
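+/**
+ * Deploys a list of Kubernetes resources and, if [blockUntilResourcesReady] is enabled, blocks
+ * until all deployed Deployments, StatefulSets, DaemonSets, ReplicaSets and Jobs report ready.
+ */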
+class RolloutManager(private val blockUntilResourcesReady: Boolean, private val client: NamespacedKubernetesClient) {
+
+    fun rollout(resources: List<HasMetadata>) {
+        val manager = K8sManager(client)
+        resources.forEach { manager.deploy(it) }
+
+        if (blockUntilResourcesReady) {
+            resources
+                .forEach {
+                    when (it) {
+                        is Deployment -> waitFor { client.apps().deployments().withName(it.metadata.name).isReady }
+                        is StatefulSet -> waitFor { client.apps().statefulSets().withName(it.metadata.name).isReady }
+                        is DaemonSet -> waitFor { client.apps().daemonSets().withName(it.metadata.name).isReady }
+                        is ReplicaSet -> waitFor { client.apps().replicaSets().withName(it.metadata.name).isReady }
+                        is Job -> waitFor { client.batch().v1().jobs().withName(it.metadata.name).isReady }
+                    }
+                }
+        }
+    }
+
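+    /**
+     * Blocks the current thread by polling [isResourceReady] every [SLEEP_TIME_MS] ms until it returns true.
+     */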
+    private fun waitFor(isResourceReady: () -> Boolean) {
+        while (!isResourceReady()) {
+            Thread.sleep(SLEEP_TIME_MS)
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt b/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
index 70a20fead9a35c03c30d7238c8e3524166b8797e..9e460fd0befafef8a644de870a5b33ccdfcf2029 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/AnalysisExecutor.kt
@@ -37,7 +37,7 @@ class AnalysisExecutor(
 
         try {
             val ioHandler = IOHandler()
-            val resultsFolder: String = ioHandler.getResultFolderURL()
+            val resultsFolder = ioHandler.getResultFolderURL()
             val fileURL = "${resultsFolder}exp${executionId}_${load}_${resource}_${slo.sloType.toSlug()}"
 
             val prometheusData = executionIntervals
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt b/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
index 7fb5417e200f64b0db74a8bebe69a751c5d484b8..7587e8326df98f3c45c016bfd3b2d7db8077e6d1 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/ExternalSloChecker.kt
@@ -40,7 +40,7 @@ class ExternalSloChecker(
             val result = post(externalSlopeURL, data = data, timeout = TIMEOUT)
             if (result.statusCode != 200) {
                 counter++
-                logger.error { "Could not reach external SLO checker." }
+                logger.error { "Could not reach external SLO checker at $externalSlopeURL." }
             } else {
                 val booleanResult = result.text.toBoolean()
                 logger.info { "SLO checker result is: $booleanResult." }
@@ -48,6 +48,6 @@ class ExternalSloChecker(
             }
         }
 
-        throw ConnectException("Could not reach external SLO checker")
+        throw ConnectException("Could not reach external SLO checker at $externalSlopeURL.")
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/MetricFetcher.kt b/theodolite/src/main/kotlin/theodolite/evaluation/MetricFetcher.kt
index e54d79fe0f95b9f6079bd4295a74e81250b73a90..b6a1857cba513f663876f88d7a7d69ad02c0bc40 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/MetricFetcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/MetricFetcher.kt
@@ -45,15 +45,16 @@ class MetricFetcher(private val prometheusURL: String, private val offset: Durat
         )
 
         while (counter < RETRIES) {
+            logger.info { "Requesting metrics from Prometheus for interval [$offsetStart,$offsetEnd]." }
             val response = get("$prometheusURL/api/v1/query_range", params = parameter, timeout = TIMEOUT)
             if (response.statusCode != 200) {
                 val message = response.jsonObject.toString()
-                logger.warn { "Could not connect to Prometheus: $message. Retrying now." }
+                logger.warn { "Could not connect to Prometheus: $message. Retry ${counter + 1}/$RETRIES." }
                 counter++
             } else {
                 val values = parseValues(response)
                 if (values.data?.result.isNullOrEmpty()) {
-                    throw NoSuchFieldException("Empty query result: $values between $start and $end for query $query.")
+                    throw NoSuchFieldException("Empty query result: $values for query '$query' in interval [$offsetStart,$offsetEnd].")
                 }
                 return parseValues(response)
             }
diff --git a/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt b/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
index b2cd269e0a6157ea23cb319cb3cfb6cb87a9d4e9..089f40dc6b5ef7d8ac4b063cae68e5e9621d1f50 100644
--- a/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
+++ b/theodolite/src/main/kotlin/theodolite/evaluation/SloConfigHandler.kt
@@ -4,6 +4,7 @@ import theodolite.benchmark.BenchmarkExecution
 import theodolite.util.InvalidPatcherConfigurationException
 import javax.enterprise.context.ApplicationScoped
 
+private const val DEFAULT_CONSUMER_LAG_METRIC_BASE = "kafka_consumergroup_lag"
 private const val DEFAULT_CONSUMER_LAG_QUERY = "sum by(consumergroup) (kafka_consumergroup_lag >= 0)"
 private const val DEFAULT_DROPPED_RECORDS_QUERY = "sum by(job) (kafka_streams_stream_task_metrics_dropped_records_total>=0)"
 
@@ -13,9 +14,14 @@ class SloConfigHandler {
         fun getQueryString(slo: BenchmarkExecution.Slo): String {
             return when (slo.sloType.lowercase()) {
                 SloTypes.GENERIC.value -> slo.properties["promQLQuery"] ?: throw IllegalArgumentException("promQLQuery expected")
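+                // Use a configured promQLQuery if present; otherwise build the default lag query,
+                // optionally filtered by the 'consumerGroup' property.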
+                SloTypes.LAG_TREND.value, SloTypes.LAG_TREND_RATIO.value -> slo.properties["promQLQuery"] ?:
+                    (slo.properties["consumerGroup"]?.let { "{consumergroup='$it'}" } ?: "").let {
+                        "sum by(consumergroup) ($DEFAULT_CONSUMER_LAG_METRIC_BASE$it >= 0)"
+                    }
+                SloTypes.DROPPED_RECORDS.value, SloTypes.DROPPED_RECORDS_RATIO.value -> slo.properties["promQLQuery"] ?: DEFAULT_DROPPED_RECORDS_QUERY
-                SloTypes.LAG_TREND.value, SloTypes.LAG_TREND_RATIO.value -> slo.properties["promQLQuery"] ?: DEFAULT_CONSUMER_LAG_QUERY
-                SloTypes.DROPPED_RECORDS.value, SloTypes.DROPPED_RECORDS_RATIO.value -> slo.properties["promQLQuery"] ?: DEFAULT_DROPPED_RECORDS_QUERY
-                else -> throw  InvalidPatcherConfigurationException("Could not find Prometheus query string for slo type $slo.sloType")
+                else -> throw InvalidPatcherConfigurationException("Could not find Prometheus query string for slo type ${slo.sloType}")
             }
         }
     }
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
index 6dcfb582655ff9295aedd63d8c30cbac7daae2b3..c20b2ba87e386dc7c0a14245e03bedfb067720e6 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/BenchmarkStateChecker.kt
@@ -190,7 +190,7 @@ class BenchmarkStateChecker(
     }
 }
 
-private fun <K, V> MutableMap<K, V>.containsMatchLabels(matchLabels: MutableMap<V, V>): Boolean {
+private fun <K, V> Map<K, V>.containsMatchLabels(matchLabels: Map<V, V>): Boolean {
     for (kv in matchLabels) {
         if (kv.value != this[kv.key as K]) {
             return false
diff --git a/theodolite/src/main/kotlin/theodolite/execution/operator/ClusterSetup.kt b/theodolite/src/main/kotlin/theodolite/execution/operator/ClusterSetup.kt
index 885315df6eda0d91a27567720056738b997a8ec1..e67be01ea80178b6d6bfb01b32bfd28c111addb9 100644
--- a/theodolite/src/main/kotlin/theodolite/execution/operator/ClusterSetup.kt
+++ b/theodolite/src/main/kotlin/theodolite/execution/operator/ClusterSetup.kt
@@ -1,13 +1,17 @@
 package theodolite.execution.operator
 
+import io.fabric8.kubernetes.client.KubernetesClientException
 import io.fabric8.kubernetes.client.NamespacedKubernetesClient
 import io.fabric8.kubernetes.client.dsl.MixedOperation
 import io.fabric8.kubernetes.client.dsl.Resource
+import mu.KotlinLogging
 import theodolite.execution.Shutdown
 import theodolite.k8s.K8sContextFactory
 import theodolite.k8s.ResourceByLabelHandler
 import theodolite.model.crd.*
 
+private val logger = KotlinLogging.logger {}
+
 class ClusterSetup(
     private val executionCRDClient: MixedOperation<ExecutionCRD, BenchmarkExecutionList, Resource<ExecutionCRD>>,
     private val benchmarkCRDClient: MixedOperation<BenchmarkCRD, KubernetesBenchmarkList, Resource<BenchmarkCRD>>,
@@ -75,10 +79,15 @@ class ClusterSetup(
             labelName = "app.kubernetes.io/created-by",
             labelValue = "theodolite"
         )
-        resourceRemover.removeCR(
-            labelName = "app.kubernetes.io/created-by",
-            labelValue = "theodolite",
-            context = serviceMonitorContext
-        )
+        try {
+            resourceRemover.removeCR(
+                labelName = "app.kubernetes.io/created-by",
+                labelValue = "theodolite",
+                context = serviceMonitorContext
+            )
+        } catch (e: KubernetesClientException) {
+            logger.warn { "Service monitors could not be cleaned up. This can happen if the ServiceMonitor CRD is not registered with the Kubernetes API." }
+            logger.debug { "Error is: ${e.message}" }
+        }
     }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/AbstractPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/AbstractPatcher.kt
index df80e9cbd2503685a7dbed35db5319920dfc42cb..fbbb7fa1d2ea9fd67732ea5b84f29012c5708136 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/AbstractPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/AbstractPatcher.kt
@@ -1,15 +1,13 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.client.utils.Serialization
 
 /**
  * A Patcher is able to modify values of a Kubernetes resource, see [Patcher].
  *
- * An AbstractPatcher is created with up to three parameters.
+ * An AbstractPatcher provides the common patching logic shared by all concrete patchers.
  *
- * @param k8sResource The Kubernetes resource to be patched.
- * @param container *(optional)* The name of the container to be patched
- * @param variableName *(optional)* The variable name to be patched
  *
  *
  * **For example** to patch the load dimension of a load generator, the patcher should be created as follow:
@@ -19,6 +17,14 @@ import io.fabric8.kubernetes.api.model.KubernetesResource
  * variableName: `NUM_SENSORS`
  *
  */
-abstract class AbstractPatcher(
-    k8sResource: KubernetesResource
-) : Patcher
+abstract class AbstractPatcher : Patcher {
+
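+    /**
+     * Clones each given resource before patching it, so the originally loaded resources remain unmodified.
+     */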
+    override fun patch(resources: List<HasMetadata>, value: String): List<HasMetadata> {
+        return resources
+            .map { Serialization.clone(it) }
+            .map { patchSingleResource(it, value) }
+    }
+
+    abstract fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata
+
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcher.kt
index bdc107910edc8ddfb41e7757c775977086a25a26..db019282fd14c8a7aaa6eba7cd3969ba42da8023 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcher.kt
@@ -1,6 +1,6 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 
 /**
  * The DataVolumeLoadGeneratorReplicaPatcher takes the total load that should be generated
@@ -10,29 +10,29 @@ import io.fabric8.kubernetes.api.model.KubernetesResource
  * The number of instances are set for the load generator and the given variable is set to the
  * load per instance.
  *
- * @property k8sResource Kubernetes resource to be patched.
  * @property maxVolume per load generator instance
  * @property container Container to be patched.
  * @property variableName Name of the environment variable to be patched.
  */
 class DataVolumeLoadGeneratorReplicaPatcher(
-    k8sResource: KubernetesResource,
     private val maxVolume: Int,
-    container: String,
-    variableName: String
-) : AbstractPatcher(k8sResource) {
+    private val container: String,
+    private val variableName: String
+) : Patcher {
 
-    private val replicaPatcher = ReplicaPatcher(k8sResource)
-    private val envVarPatcher = EnvVarPatcher(k8sResource, container, variableName)
+    override fun patch(resources: List<HasMetadata>, value: String) : List<HasMetadata> {
+        return resources.flatMap { patchSingleResource(it, value) }
+    }
 
-    override fun <T> patch(value: T) {
+    fun patchSingleResource(k8sResource: HasMetadata, value: String): List<HasMetadata> {
+        val resource = k8sResource
         // calculate number of load generator instances and load per instance
-        val load = Integer.parseInt(value.toString())
+        val load = Integer.parseInt(value)
         val loadGenInstances = (load + maxVolume - 1) / maxVolume
         val loadPerInstance = load / loadGenInstances
 
         // Patch instance values and load value of generators
-        replicaPatcher.patch(loadGenInstances.toString())
-        envVarPatcher.patch(loadPerInstance.toString())
+        val resourceList = ReplicaPatcher().patch(listOf(resource), loadGenInstances.toString())
+        return EnvVarPatcher(this.container, this.variableName).patch(resourceList, loadPerInstance.toString())
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/EnvVarPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/EnvVarPatcher.kt
index 416aec74a3af9b74594f5e6cd018682bf91cbf63..ee95871211145e740a64e711996b85af98ee2151 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/EnvVarPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/EnvVarPatcher.kt
@@ -2,29 +2,30 @@ package theodolite.patcher
 
 import io.fabric8.kubernetes.api.model.Container
 import io.fabric8.kubernetes.api.model.EnvVar
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 
 /**
  * The EnvVarPatcher allows to modify the value of an environment variable
  *
- * @property k8sResource Kubernetes resource to be patched.
  * @property container Container to be patched.
  * @property variableName Name of the environment variable to be patched.
  */
 class EnvVarPatcher(
-    private val k8sResource: KubernetesResource,
     private val container: String,
     private val variableName: String
-) : AbstractPatcher(k8sResource) {
+) : AbstractPatcher() {
 
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
             this.setEnv(
-                k8sResource, this.container,
-                mapOf(this.variableName to value) as Map<kotlin.String, kotlin.String>
+                resource, this.container,
+                mapOf(this.variableName to value)
             )
+            return resource
         }
+        return resource
     }
 
     /**
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/ImagePatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/ImagePatcher.kt
index 8f6753372076c119324dc962112928253633b6b0..2918c825931eb0bb4ca8ad176224e79815272b67 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/ImagePatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/ImagePatcher.kt
@@ -1,27 +1,37 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
+import io.fabric8.kubernetes.client.utils.Serialization
 
 /**
  * The Image patcher allows to change the image of a container.
  *
- * @param k8sResource Kubernetes resource to be patched.
  * @param container Container to be patched.
  */
-class ImagePatcher(private val k8sResource: KubernetesResource, private val container: String) :
-    AbstractPatcher(k8sResource) {
+class ImagePatcher(
+    private val container: String
+) : AbstractPatcher() {
 
-    override fun <String> patch(imagePath: String) {
-        if (k8sResource is Deployment) {
-            k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                it.image = imagePath as kotlin.String
+    override fun patch(resources: List<HasMetadata>, value: String): List<HasMetadata> {
+        return resources
+            .map { Serialization.clone(it) }
+            .map { patchSingleResource(it, value) }
+    }
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
+            resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                it.image = value
             }
-        } else if (k8sResource is StatefulSet) {
-            k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                it.image = imagePath as kotlin.String
+            return resource
+        } else if (resource is StatefulSet) {
+            resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                it.image = value
             }
+            return resource
         }
+        return resource
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/LabelPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/LabelPatcher.kt
index 2f8c703afa9e826a79f0785abef493d2d448ac74..9a98f9689e28d77d3e7eea5974eff29ab4bbe0f8 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/LabelPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/LabelPatcher.kt
@@ -1,49 +1,50 @@
 package theodolite.patcher
 
 import io.fabric8.kubernetes.api.model.ConfigMap
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.GenericKubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.Service
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
-import io.fabric8.kubernetes.client.CustomResource
 
-class LabelPatcher(private val k8sResource: KubernetesResource, val variableName: String) :
-    AbstractPatcher(k8sResource) {
+class LabelPatcher(
+    val variableName: String
+) : AbstractPatcher() {
 
-    override fun <String> patch(labelValue: String) {
-        if (labelValue is kotlin.String) {
-            when (k8sResource) {
-                is Deployment -> {
-                    if (k8sResource.metadata.labels == null) {
-                        k8sResource.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.metadata.labels[this.variableName] = labelValue
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
+            is Deployment -> {
+                if (resource.metadata.labels == null) {
+                    resource.metadata.labels = mutableMapOf()
                 }
-                is StatefulSet -> {
-                    if (k8sResource.metadata.labels == null) {
-                        k8sResource.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.metadata.labels[this.variableName] = labelValue
+                resource.metadata.labels[this.variableName] = value
+            }
+            is StatefulSet -> {
+                if (resource.metadata.labels == null) {
+                    resource.metadata.labels = mutableMapOf()
                 }
-                is Service -> {
-                    if (k8sResource.metadata.labels == null) {
-                        k8sResource.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.metadata.labels[this.variableName] = labelValue
+                resource.metadata.labels[this.variableName] = value
+            }
+            is Service -> {
+                if (resource.metadata.labels == null) {
+                    resource.metadata.labels = mutableMapOf()
                 }
-                is ConfigMap -> {
-                    if (k8sResource.metadata.labels == null) {
-                        k8sResource.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.metadata.labels[this.variableName] = labelValue
+                resource.metadata.labels[this.variableName] = value
+
+            }
+            is ConfigMap -> {
+                if (resource.metadata.labels == null) {
+                    resource.metadata.labels = mutableMapOf()
                 }
-                is CustomResource<*, *> -> {
-                    if (k8sResource.metadata.labels == null) {
-                        k8sResource.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.metadata.labels[this.variableName] = labelValue
+                resource.metadata.labels[this.variableName] = value
+            }
+            is GenericKubernetesResource -> {
+                if (resource.metadata.labels == null) {
+                    resource.metadata.labels = mutableMapOf()
                 }
+                resource.metadata.labels[this.variableName] = value
             }
         }
+        return resource
     }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt
index 30ff73b5da3b551119ad085adbc982180e4fc066..693d751f275d3666b5e360766eb449b8f6b639c3 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/MatchLabelPatcher.kt
@@ -1,34 +1,33 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
 
 /**
  * This patcher is able to set the `spec.selector.matchLabels` for a `Deployment` or `StatefulSet` Kubernetes resource.
  *
- * @property k8sResource The Kubernetes manifests to patch
  * @property variableName The matchLabel which should be set
  */
-class MatchLabelPatcher(private val k8sResource: KubernetesResource, val variableName: String) :
-    AbstractPatcher(k8sResource) {
+class MatchLabelPatcher(
+    val variableName: String
+) : AbstractPatcher() {
 
-    override fun <String> patch(labelValue: String) {
-        if (labelValue is kotlin.String) {
-            when (k8sResource) {
-                is Deployment -> {
-                    if (k8sResource.spec.selector.matchLabels == null) {
-                        k8sResource.spec.selector.matchLabels = mutableMapOf()
-                    }
-                    k8sResource.spec.selector.matchLabels[this.variableName] = labelValue
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
+            is Deployment -> {
+                if (resource.spec.selector.matchLabels == null) {
+                    resource.spec.selector.matchLabels = mutableMapOf()
                 }
-                is StatefulSet -> {
-                    if (k8sResource.spec.selector.matchLabels == null) {
-                        k8sResource.spec.selector.matchLabels = mutableMapOf()
-                    }
-                    k8sResource.spec.selector.matchLabels[this.variableName] = labelValue
+                resource.spec.selector.matchLabels[this.variableName] = value
+            }
+            is StatefulSet -> {
+                if (resource.spec.selector.matchLabels == null) {
+                    resource.spec.selector.matchLabels = mutableMapOf()
                 }
+                resource.spec.selector.matchLabels[this.variableName] = value
             }
         }
+        return resource
     }
 }
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/NamePatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/NamePatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..74fae390145a10d487b9c39628e67965999593e4
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/NamePatcher.kt
@@ -0,0 +1,35 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.ConfigMap
+import io.fabric8.kubernetes.api.model.GenericKubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.api.model.Service
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+
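+/**
+ * The NamePatcher sets the `metadata.name` of a Kubernetes resource.
+ */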
+class NamePatcher : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
+            is Deployment,
+            is StatefulSet,
+            is Service,
+            is ConfigMap,
+            is io.fabric8.kubernetes.api.model.networking.v1.Ingress,
+            is GenericKubernetesResource -> resource.metadata.name = value
+        }
+        return resource
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/NodeSelectorPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/NodeSelectorPatcher.kt
index 0e8cd553a6c6a9ed6fa2c8cc1b84e4cfebe79d73..b608d3b10440a19998f81776642562d337a4642a 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/NodeSelectorPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/NodeSelectorPatcher.kt
@@ -1,19 +1,22 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 
 /**
  * The Node selector patcher make it possible to set the NodeSelector of a Kubernetes deployment.
  *
- * @param k8sResource Kubernetes resource to be patched.
  * @param variableName The `label-key` of the node for which the `label-value` is to be patched.
  */
-class NodeSelectorPatcher(private val k8sResource: KubernetesResource, private val variableName: String) :
-    AbstractPatcher(k8sResource) {
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
-            k8sResource.spec.template.spec.nodeSelector = mapOf(variableName to value as kotlin.String)
+class NodeSelectorPatcher(
+    private val variableName: String
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
+            resource.spec.template.spec.nodeSelector = mapOf(variableName to value)
         }
+        return resource
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcher.kt
index c617917e6894c3a30779dd4257a96365ded35481..deee1b6efebe98f52e2d19c5cbe2e4c68174ed8f 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcher.kt
@@ -1,23 +1,23 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import kotlin.math.pow
 
 class NumNestedGroupsLoadGeneratorReplicaPatcher(
-    private val k8sResource: KubernetesResource,
     private val numSensors: String,
-    private val loadGenMaxRecords: String
-) :
-    AbstractPatcher(k8sResource) {
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
-            if (value is kotlin.String) {
-                val approxNumSensors = numSensors.toDouble().pow(Integer.parseInt(value).toDouble())
-                val loadGenInstances =
-                    (approxNumSensors + loadGenMaxRecords.toDouble() - 1) / loadGenMaxRecords.toDouble()
-                this.k8sResource.spec.replicas = loadGenInstances.toInt()
-            }
+    private val loadGenMaxRecords: String,
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
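+            // Approximate the total number of sensors as numSensors^(nested groups) and derive the
+            // required load generator instances via ceiling division by loadGenMaxRecords.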
+            val approxNumSensors = numSensors.toDouble().pow(Integer.parseInt(value).toDouble())
+            val loadGenInstances =
+                (approxNumSensors + loadGenMaxRecords.toDouble() - 1) / loadGenMaxRecords.toDouble()
+            resource.spec.replicas = loadGenInstances.toInt()
+
         }
+        return resource
     }
 }
+
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcher.kt
index 86bb37db3cb9fd0d3bca1690d5eb4e622329a9bc..8463d672687aa9594e2ef168d53e6d7551bc0d4a 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcher.kt
@@ -1,21 +1,21 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 
 
 class NumSensorsLoadGeneratorReplicaPatcher(
-    private val k8sResource: KubernetesResource,
-    private val loadGenMaxRecords: String
-) :
-    AbstractPatcher(k8sResource) {
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
-            if (value is kotlin.String) {
-                val loadGenInstances =
-                    (Integer.parseInt(value) + loadGenMaxRecords.toInt() - 1) / loadGenMaxRecords.toInt()
-                this.k8sResource.spec.replicas = loadGenInstances
-            }
+    private val loadGenMaxRecords: String,
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
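+            // Ceiling division: the number of load generator instances needed to cover the requested sensors.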
+            val loadGenInstances =
+                (Integer.parseInt(value) + loadGenMaxRecords.toInt() - 1) / loadGenMaxRecords.toInt()
+            resource.spec.replicas = loadGenInstances
+
         }
+        return resource
     }
+
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/PatchHandler.kt b/theodolite/src/main/kotlin/theodolite/patcher/PatchHandler.kt
new file mode 100644
index 0000000000000000000000000000000000000000..73f2f2435b42c59a1b0a294c67bbd0c726ffc209
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/PatchHandler.kt
@@ -0,0 +1,26 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.HasMetadata
+import theodolite.util.InvalidPatcherConfigurationException
+import theodolite.util.PatcherDefinition
+
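+/**
+ * Applies a [PatcherDefinition] to the resources registered under its resource name in a
+ * map from filename to resource list and returns the patched resources.
+ */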
+class PatchHandler {
+
+    companion object {
+
+        private fun getResourcesToPatch(resources: MutableMap<String, List<HasMetadata>>, patcherDefinition: PatcherDefinition): List<HasMetadata> {
+            return resources[patcherDefinition.resource]
+                ?: throw InvalidPatcherConfigurationException("Could not find resource ${patcherDefinition.resource}")
+
+        }
+
+        fun patchResource(
+            resources: MutableMap<String, List<HasMetadata>>,
+            patcherDefinition: PatcherDefinition,
+            value: String,
+        ): List<HasMetadata> {
+            val resToPatch = getResourcesToPatch(resources, patcherDefinition)
+            return PatcherFactory.createPatcher(patcherDefinition).patch(resToPatch, value)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/Patcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/Patcher.kt
index 84b886cb4f06b3e667eb8b8aeaa622e1ee54852e..72fe6a1f02e7f1767176fd965740c80f1437f6c1 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/Patcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/Patcher.kt
@@ -1,5 +1,6 @@
 package theodolite.patcher
 
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.quarkus.runtime.annotations.RegisterForReflection
 
 /**
@@ -13,8 +14,7 @@ interface Patcher {
      * The patch method modifies a value in the definition of a
      * Kubernetes resource.
      *
-     * @param T The type of value
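+     * @param resources The Kubernetes resources to be patched.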
      * @param value The value to be used.
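+     * @return The patched resources.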
      */
-    fun <T> patch(value: T)
+    fun patch(resources: List<HasMetadata>, value: String) : List<HasMetadata>
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt b/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
index e92de4dba7de298c9df76600f2c6785f5878103e..85848a48450637863363a366a1a1767c2c5af565 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
@@ -1,6 +1,5 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
 import theodolite.util.InvalidPatcherConfigurationException
 import theodolite.util.PatcherDefinition
 
@@ -10,94 +9,84 @@ import theodolite.util.PatcherDefinition
  * @constructor Creates an empty PatcherFactory.
  */
 class PatcherFactory {
-    /**
-     * Create patcher based on the given [PatcherDefinition] and
-     * the list of KubernetesResources.
-     *
-     * @param patcherDefinition The [PatcherDefinition] for which are
-     *     [Patcher] should be created.
-     * @param k8sResources List of all available Kubernetes resources.
-     *     This is a list of pairs<String, KubernetesResource>:
-     *     The frist corresponds to the filename where the resource is defined.
-     *     The second corresponds to the concrete [KubernetesResource] that should be patched.
-     * @return The created [Patcher].
-     * @throws IllegalArgumentException if no patcher can be created.
-     */
-    fun createPatcher(
-        patcherDefinition: PatcherDefinition,
-        k8sResources: Collection<Pair<String, KubernetesResource>>
-    ): Patcher {
-        val resource =
-            k8sResources.filter { it.first == patcherDefinition.resource }
-                .map { resource -> resource.second }
-                .firstOrNull()
-                ?: throw InvalidPatcherConfigurationException("Could not find resource ${patcherDefinition.resource}")
 
-        return try {
-            when (patcherDefinition.type) {
-                "ReplicaPatcher" -> ReplicaPatcher(
-                    k8sResource = resource
-                )
-                "NumNestedGroupsLoadGeneratorReplicaPatcher" -> NumNestedGroupsLoadGeneratorReplicaPatcher(
-                    k8sResource = resource,
-                    loadGenMaxRecords = patcherDefinition.properties["loadGenMaxRecords"]!!,
-                    numSensors = patcherDefinition.properties["numSensors"]!!
-                )
-                "NumSensorsLoadGeneratorReplicaPatcher" -> NumSensorsLoadGeneratorReplicaPatcher(
-                    k8sResource = resource,
-                    loadGenMaxRecords = patcherDefinition.properties["loadGenMaxRecords"]!!
-                )
-                "DataVolumeLoadGeneratorReplicaPatcher" -> DataVolumeLoadGeneratorReplicaPatcher(
-                    k8sResource = resource,
-                    maxVolume = patcherDefinition.properties["maxVolume"]!!.toInt(),
-                    container = patcherDefinition.properties["container"]!!,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "EnvVarPatcher" -> EnvVarPatcher(
-                    k8sResource = resource,
-                    container = patcherDefinition.properties["container"]!!,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "NodeSelectorPatcher" -> NodeSelectorPatcher(
-                    k8sResource = resource,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "ResourceLimitPatcher" -> ResourceLimitPatcher(
-                    k8sResource = resource,
-                    container = patcherDefinition.properties["container"]!!,
-                    limitedResource = patcherDefinition.properties["limitedResource"]!!
-                )
-                "ResourceRequestPatcher" -> ResourceRequestPatcher(
-                    k8sResource = resource,
-                    container = patcherDefinition.properties["container"]!!,
-                    requestedResource = patcherDefinition.properties["requestedResource"]!!
-                )
-                "SchedulerNamePatcher" -> SchedulerNamePatcher(
-                    k8sResource = resource
-                )
-                "LabelPatcher" -> LabelPatcher(
-                    k8sResource = resource,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "MatchLabelPatcher" -> MatchLabelPatcher(
-                    k8sResource = resource,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "TemplateLabelPatcher" -> TemplateLabelPatcher(
-                    k8sResource = resource,
-                    variableName = patcherDefinition.properties["variableName"]!!
-                )
-                "ImagePatcher" -> ImagePatcher(
-                    k8sResource = resource,
-                    container = patcherDefinition.properties["container"]!!
+    companion object {
+        /**
+         * Create a patcher based on the given [PatcherDefinition].
+         *
+         * @param patcherDefinition The [PatcherDefinition] for which a
+         *     [Patcher] should be created.
+         * @return The created [Patcher].
+         * @throws InvalidPatcherConfigurationException if no patcher can be created.
+         */
+        fun createPatcher(patcherDefinition: PatcherDefinition): Patcher {
+
+            return try {
+                when (patcherDefinition.type) {
+                    "ReplicaPatcher" -> ReplicaPatcher(
+                    )
+                    "NumNestedGroupsLoadGeneratorReplicaPatcher" -> NumNestedGroupsLoadGeneratorReplicaPatcher(
+                        loadGenMaxRecords = patcherDefinition.properties["loadGenMaxRecords"]!!,
+                        numSensors = patcherDefinition.properties["numSensors"]!!
+                    )
+                    "NumSensorsLoadGeneratorReplicaPatcher" -> NumSensorsLoadGeneratorReplicaPatcher(
+                        loadGenMaxRecords = patcherDefinition.properties["loadGenMaxRecords"]!!
+                    )
+                    "DataVolumeLoadGeneratorReplicaPatcher" -> DataVolumeLoadGeneratorReplicaPatcher(
+                        maxVolume = patcherDefinition.properties["maxVolume"]!!.toInt(),
+                        container = patcherDefinition.properties["container"]!!,
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "EnvVarPatcher" -> EnvVarPatcher(
+                        container = patcherDefinition.properties["container"]!!,
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "NodeSelectorPatcher" -> NodeSelectorPatcher(
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "ResourceLimitPatcher" -> ResourceLimitPatcher(
+                        container = patcherDefinition.properties["container"]!!,
+                        limitedResource = patcherDefinition.properties["limitedResource"]!!
+                    )
+                    "ResourceRequestPatcher" -> ResourceRequestPatcher(
+                        container = patcherDefinition.properties["container"]!!,
+                        requestedResource = patcherDefinition.properties["requestedResource"]!!
+                    )
+                    "SchedulerNamePatcher" -> SchedulerNamePatcher()
+                    "LabelPatcher" -> LabelPatcher(
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "MatchLabelPatcher" -> MatchLabelPatcher(
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "TemplateLabelPatcher" -> TemplateLabelPatcher(
+                        variableName = patcherDefinition.properties["variableName"]!!
+                    )
+                    "ImagePatcher" -> ImagePatcher(
+                        container = patcherDefinition.properties["container"]!!
+                    )
+                    "NamePatcher" -> NamePatcher()
+                    "ServiceSelectorPatcher" -> ServiceSelectorPatcher(
+                        variableName = patcherDefinition.properties["label"]!!
+                    )
+                    "theodolite.patcher.VolumesConfigMapPatcher" -> VolumesConfigMapPatcher(
+                        volumeName = patcherDefinition.properties["volumeName"]!!
+                    )
+                    else -> throw InvalidPatcherConfigurationException("Patcher type ${patcherDefinition.type} not found.")
+                }
+            } catch (e: NullPointerException) {
+                throw InvalidPatcherConfigurationException(
+                    "Could not create patcher with type ${patcherDefinition.type}" +
+                            " Probably a required patcher argument was not specified.", e
                 )
-                else -> throw InvalidPatcherConfigurationException("Patcher type ${patcherDefinition.type} not found.")
             }
-        } catch (e: NullPointerException) {
-            throw InvalidPatcherConfigurationException(
-                "Could not create patcher with type ${patcherDefinition.type}" +
-                        " Probably a required patcher argument was not specified.", e
-            )
         }
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/ReplicaPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/ReplicaPatcher.kt
index 4cc35f2ed74f9e366c266c3f98f1b3d36d4ba1b8..837bebf9da968d9afd7da6846575c9f1f457a3e3 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/ReplicaPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/ReplicaPatcher.kt
@@ -1,19 +1,18 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 
 /**
  * The Replica [Patcher] modifies the number of replicas for the given Kubernetes deployment.
  *
- * @param k8sResource  Kubernetes resource to be patched.
  */
-class ReplicaPatcher(private val k8sResource: KubernetesResource) : AbstractPatcher(k8sResource) {
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
-            if (value is kotlin.String) {
-                this.k8sResource.spec.replicas = Integer.parseInt(value)
-            }
+class ReplicaPatcher : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
+            resource.spec.replicas = Integer.parseInt(value)
         }
+        return resource
     }
-}
+}
\ No newline at end of file
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/ResourceLimitPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/ResourceLimitPatcher.kt
index 9dcdffa0407dd4fdaf2d9b0a898bcdf6cebe5a8b..8b75d43bfc5b589c8c65a1016058a5b850ac9063 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/ResourceLimitPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/ResourceLimitPatcher.kt
@@ -1,9 +1,6 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.Container
-import io.fabric8.kubernetes.api.model.KubernetesResource
-import io.fabric8.kubernetes.api.model.Quantity
-import io.fabric8.kubernetes.api.model.ResourceRequirements
+import io.fabric8.kubernetes.api.model.*
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
 import theodolite.util.InvalidPatcherConfigurationException
@@ -16,30 +13,31 @@ import theodolite.util.InvalidPatcherConfigurationException
  * @param limitedResource The resource to be limited (e.g. **cpu or memory**)
  */
 class ResourceLimitPatcher(
-    private val k8sResource: KubernetesResource,
     private val container: String,
     private val limitedResource: String
-) : AbstractPatcher(k8sResource) {
+) : AbstractPatcher() {
 
-    override fun <String> patch(value: String) {
-        when (k8sResource) {
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
             is Deployment -> {
-                k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                    setLimits(it, value as kotlin.String)
+                resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                    setLimits(it, value)
                 }
             }
             is StatefulSet -> {
-                k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                    setLimits(it, value as kotlin.String)
+                resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                    setLimits(it, value)
                 }
             }
             else -> {
-                throw InvalidPatcherConfigurationException("ResourceLimitPatcher not applicable for $k8sResource")
+                throw InvalidPatcherConfigurationException("ResourceLimitPatcher is not applicable for $resource")
             }
         }
+        return resource
     }
 
-    private fun setLimits(container: Container, value: String) {
+    private fun setLimits(container: Container, value: String) {
         when {
             container.resources == null -> {
                 val resource = ResourceRequirements()
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/ResourceRequestPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/ResourceRequestPatcher.kt
index 24cdde40f7f78bd67d115b2dc44f47e180f51ee2..f63386e5565d053bf276ccada628c3a1676c7c68 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/ResourceRequestPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/ResourceRequestPatcher.kt
@@ -1,9 +1,6 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.Container
-import io.fabric8.kubernetes.api.model.KubernetesResource
-import io.fabric8.kubernetes.api.model.Quantity
-import io.fabric8.kubernetes.api.model.ResourceRequirements
+import io.fabric8.kubernetes.api.model.*
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
 import theodolite.util.InvalidPatcherConfigurationException
@@ -11,34 +8,33 @@ import theodolite.util.InvalidPatcherConfigurationException
 /**
  * The Resource request [Patcher] set resource limits for deployments and statefulSets.
  *
- * @param k8sResource Kubernetes resource to be patched.
  * @param container Container to be patched.
  * @param requestedResource The resource to be requested (e.g. **cpu or memory**)
  */
 class ResourceRequestPatcher(
-    private val k8sResource: KubernetesResource,
     private val container: String,
     private val requestedResource: String
-) : AbstractPatcher(k8sResource) {
+) : AbstractPatcher() {
 
-    override fun <String> patch(value: String) {
-        when (k8sResource) {
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
             is Deployment -> {
-                k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                    setRequests(it, value as kotlin.String)
+                resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                    setRequests(it, value)
                 }
             }
             is StatefulSet -> {
-                k8sResource.spec.template.spec.containers.filter { it.name == container }.forEach {
-                    setRequests(it, value as kotlin.String)
+                resource.spec.template.spec.containers.filter { it.name == container }.forEach {
+                    setRequests(it, value)
                 }
             }
             else -> {
-                throw InvalidPatcherConfigurationException("ResourceRequestPatcher not applicable for $k8sResource")
+                throw InvalidPatcherConfigurationException("ResourceRequestPatcher is not applicable for $resource")
             }
         }
+        return resource
     }
-
     private fun setRequests(container: Container, value: String) {
         when {
             container.resources == null -> {
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt
index 348f0c50090a34c91221d3e099c3532375a578da..fc6a2864b1cc9495336a2e4756da97b2bd498dc3 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt
@@ -1,6 +1,6 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 
 /**
@@ -8,10 +8,13 @@ import io.fabric8.kubernetes.api.model.apps.Deployment
  * be used to deploy the given deployment.
- * @param k8sResource Kubernetes resource to be patched.
  */
-class SchedulerNamePatcher(private val k8sResource: KubernetesResource) : Patcher {
-    override fun <String> patch(value: String) {
-        if (k8sResource is Deployment) {
-            k8sResource.spec.template.spec.schedulerName = value as kotlin.String
+class SchedulerNamePatcher : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
+            resource.spec.template.spec.schedulerName = value
         }
+        return resource
     }
 }
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/ServiceSelectorPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/ServiceSelectorPatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..3d94e283902b9879225ca4b8730730697ebe02a7
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/ServiceSelectorPatcher.kt
@@ -0,0 +1,19 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.api.model.Service
+
+class ServiceSelectorPatcher(
+    private var variableName: String
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Service) {
+            if (resource.spec.selector == null) {
+                resource.spec.selector = mutableMapOf()
+            }
+            resource.spec.selector[this.variableName] = value
+        }
+        return resource
+    }
+}
\ No newline at end of file
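
The new ServiceSelectorPatcher rewrites (or creates) spec.selector on a Service. A short sketch of its effect, under the same patch(resources, value) assumption as above; names and values are illustrative:

    import io.fabric8.kubernetes.api.model.Service
    import io.fabric8.kubernetes.api.model.ServiceBuilder
    import theodolite.patcher.ServiceSelectorPatcher

    fun main() {
        val service = ServiceBuilder()
            .withNewMetadata().withName("my-service").endMetadata()
            .withNewSpec().endSpec()
            .build()

        // sets spec.selector["app"] = "my-app" so the Service targets the patched workload
        val patched = ServiceSelectorPatcher("app").patch(listOf(service), "my-app")
        println((patched.first() as Service).spec.selector)  // {app=my-app}
    }
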
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt
index a524e5c40f90ccf98dc95003cc33dcfceb6f8598..2707d98e046ce9aef01285d9febc7ab3b6d4c45d 100644
--- a/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt
+++ b/theodolite/src/main/kotlin/theodolite/patcher/TemplateLabelPatcher.kt
@@ -1,34 +1,34 @@
 package theodolite.patcher
 
-import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.api.model.apps.Deployment
 import io.fabric8.kubernetes.api.model.apps.StatefulSet
 
 /**
  * This patcher is able to set the field `spec.template.metadata.labels` for a `Deployment` or `StatefulSet` Kubernetes resource.
  *
- * @property k8sResource The Kubernetes manifests to patch
  * @property variableName The label which should be set
  */
-class TemplateLabelPatcher(private val k8sResource: KubernetesResource, val variableName: String) :
-    AbstractPatcher(k8sResource) {
+class TemplateLabelPatcher(
+    val variableName: String
+) : AbstractPatcher() {
 
-    override fun <String> patch(labelValue: String) {
-        if (labelValue is kotlin.String) {
-            when (k8sResource) {
-                is Deployment -> {
-                    if (k8sResource.spec.template.metadata.labels == null) {
-                        k8sResource.spec.template.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.spec.template.metadata.labels[this.variableName] = labelValue
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        when (resource) {
+            is Deployment -> {
+                if (resource.spec.template.metadata.labels == null) {
+                    resource.spec.template.metadata.labels = mutableMapOf()
                 }
-                is StatefulSet -> {
-                    if (k8sResource.spec.template.metadata.labels == null) {
-                        k8sResource.spec.template.metadata.labels = mutableMapOf()
-                    }
-                    k8sResource.spec.template.metadata.labels[this.variableName] = labelValue
+                resource.spec.template.metadata.labels[this.variableName] = value
+            }
+            is StatefulSet -> {
+                if (resource.spec.template.metadata.labels == null) {
+                    resource.spec.template.metadata.labels = mutableMapOf()
                 }
+                resource.spec.template.metadata.labels[this.variableName] = value
             }
         }
+        return resource
     }
 }
\ No newline at end of file
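
Theodolite now ships three label-oriented patchers that each touch a different field of a Deployment: LabelPatcher (metadata.labels), MatchLabelPatcher (spec.selector.matchLabels), and this TemplateLabelPatcher (spec.template.metadata.labels), as their tests later in this diff show. A sketch of applying all three consistently, again assuming the patch(resources, value) entry point; the label key is illustrative:

    import io.fabric8.kubernetes.api.model.HasMetadata
    import theodolite.patcher.LabelPatcher
    import theodolite.patcher.MatchLabelPatcher
    import theodolite.patcher.TemplateLabelPatcher

    // Keeps all three label locations of a workload in sync for one experiment run.
    fun labelWorkload(resources: List<HasMetadata>, value: String): List<HasMetadata> {
        var result = resources
        result = LabelPatcher("app").patch(result, value)          // metadata.labels
        result = MatchLabelPatcher("app").patch(result, value)     // spec.selector.matchLabels
        result = TemplateLabelPatcher("app").patch(result, value)  // spec.template.metadata.labels
        return result
    }
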
diff --git a/theodolite/src/main/kotlin/theodolite/patcher/VolumesConfigMapPatcher.kt b/theodolite/src/main/kotlin/theodolite/patcher/VolumesConfigMapPatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..17068c7e7f206b1bbed4530c2008b60d3aaf593e
--- /dev/null
+++ b/theodolite/src/main/kotlin/theodolite/patcher/VolumesConfigMapPatcher.kt
@@ -0,0 +1,44 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+
+class VolumesConfigMapPatcher(private var volumeName: String
+) : AbstractPatcher() {
+
+    override fun patchSingleResource(resource: HasMetadata, value: String): HasMetadata {
+        if (resource is Deployment) {
+            if (resource.spec.template.spec.volumes == null) {
+                resource.spec.template.spec.volumes = mutableListOf()
+            }
+            val volumes = resource.spec.template.spec.volumes
+
+            for (volume in volumes) {
+                // volumes that are not backed by a ConfigMap have no configMap entry
+                volume.configMap?.let {
+                    if (it.name == volumeName) {
+                        it.name = value
+                    }
+                }
+            }
+        }
+        if (resource is StatefulSet) {
+            if (resource.spec.template.spec.volumes == null) {
+                resource.spec.template.spec.volumes = mutableListOf()
+            }
+            val volumes = resource.spec.template.spec.volumes
+
+            for (volume in volumes) {
+                // volumes that are not backed by a ConfigMap have no configMap entry
+                volume.configMap?.let {
+                    if (it.name == volumeName) {
+                        it.name = value
+                    }
+                }
+            }
+        }
+
+        return resource
+    }
+}
\ No newline at end of file
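
VolumesConfigMapPatcher only rewrites volumes whose configMap name matches the configured one; all other volumes are left untouched. A brief sketch of the expected effect, using the same builder pattern as the test fixtures below; names are illustrative:

    import io.fabric8.kubernetes.api.model.apps.Deployment
    import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder
    import theodolite.patcher.VolumesConfigMapPatcher

    fun main() {
        val deployment = DeploymentBuilder()
            .withNewMetadata().withName("dummy").endMetadata()
            .withNewSpec()
                .withNewTemplate()
                    .withNewSpec()
                        .addNewVolume()
                            .withName("config")
                            .withNewConfigMap().withName("old-configmap").endConfigMap()
                        .endVolume()
                    .endSpec()
                .endTemplate()
            .endSpec()
            .build()

        val patched = VolumesConfigMapPatcher("old-configmap").patch(listOf(deployment), "new-configmap")
        println((patched.first() as Deployment).spec.template.spec.volumes.first().configMap.name)  // new-configmap
    }
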
diff --git a/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt b/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt
index 0e40fca5caf9fe721c547e09d2ba22c25860a1bf..47f0e52f45e46e3cda093ff1b9722071f22ef7e8 100644
--- a/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt
+++ b/theodolite/src/test/kotlin/theodolite/benchmark/ActionCommandTest.kt
@@ -102,7 +102,7 @@ class ActionCommandTest {
         val action = Action()
         action.selector = ActionSelector()
         action.selector.pod = PodSelector()
-        action.selector.pod.matchLabels = mutableMapOf("app" to "pod")
+        action.selector.pod.matchLabels = mapOf("app" to "pod")
         action.exec = Command()
         action.exec.command = arrayOf("ls")
         action.exec.timeoutSeconds = 10L
@@ -118,7 +118,7 @@ class ActionCommandTest {
         val action = Action()
         action.selector = ActionSelector()
         action.selector.pod = PodSelector()
-        action.selector.pod.matchLabels = mutableMapOf("app" to "pod")
+        action.selector.pod.matchLabels = mapOf("app" to "pod")
         action.exec = Command()
         action.exec.command = arrayOf("error-command")
         action.exec.timeoutSeconds = 10L
diff --git a/theodolite/src/test/kotlin/theodolite/benchmark/ResourceSetsTest.kt b/theodolite/src/test/kotlin/theodolite/benchmark/ResourceSetsTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..b2ce9d73447961c56b121542a4c91822e3703e95
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/benchmark/ResourceSetsTest.kt
@@ -0,0 +1,133 @@
+package theodolite.benchmark
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
+import io.fabric8.kubernetes.api.model.ConfigMap
+import io.fabric8.kubernetes.api.model.ConfigMapBuilder
+import io.fabric8.kubernetes.api.model.HasMetadata
+import io.fabric8.kubernetes.client.server.mock.KubernetesServer
+import io.quarkus.test.junit.QuarkusTest
+import io.quarkus.test.kubernetes.client.KubernetesTestServer
+import io.quarkus.test.kubernetes.client.WithKubernetesTestServer
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Assertions.assertEquals
+import org.junit.jupiter.api.Assertions.assertTrue
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.assertThrows
+import org.junit.jupiter.api.io.TempDir
+import theodolite.util.DeploymentFailedException
+import java.nio.file.Files
+import java.nio.file.Path
+
+@QuarkusTest
+@WithKubernetesTestServer
+internal class ResourceSetsTest {
+
+    @KubernetesTestServer
+    private lateinit var server: KubernetesServer
+
+    @TempDir
+    @JvmField
+    final var tempDir: Path? = null
+
+    private val objectMapper: ObjectMapper = ObjectMapper(YAMLFactory())
+
+    @BeforeEach
+    fun setUp() {
+        server.before()
+    }
+
+    @AfterEach
+    fun tearDown() {
+        server.after()
+    }
+
+    private fun deployAndGetResource(vararg resources: HasMetadata): ConfigMapResourceSet {
+        val configMap = ConfigMapBuilder()
+            .withNewMetadata().withName(resources[0].metadata.name).endMetadata()
+            .let {
+                resources.foldIndexed(it) { i, b, r ->
+                    b.addToData("resource_$i.yaml", objectMapper.writeValueAsString(r))
+                }
+            }
+            .build()
+
+        server.client.configMaps().createOrReplace(configMap)
+
+        val resourceSet = ConfigMapResourceSet()
+        resourceSet.name = resources[0].metadata.name
+
+        return resourceSet
+    }
+
+    private fun copyTestResourceFile(fileName: String, tempDir: Path) {
+        val stream = javaClass.getResourceAsStream("/k8s-resource-files/$fileName")
+            ?: throw IllegalArgumentException("File does not exist")
+        val target = tempDir.resolve(fileName)
+        Files.copy(stream, target)
+    }
+
+    @Test
+    fun testLoadConfigMap() {
+        val resource = ConfigMapBuilder()
+            .withNewMetadata()
+            .withName("test-configmap")
+            .endMetadata()
+            .build()
+        deployAndGetResource(resource)
+
+        val yamlString =
+            """
+            configMap:    
+                name: test-configmap
+                files:
+            """
+
+        val resourcesSet: ResourceSets = objectMapper.readValue(yamlString, ResourceSets::class.java)
+        assertTrue(resourcesSet.fileSystem == null)
+        assertTrue(resourcesSet.configMap != null)
+
+        val configMap = resourcesSet.loadResourceSet(server.client)
+        assertEquals(1, configMap.size)
+        assertTrue(configMap.toList().first().second is ConfigMap)
+        assertTrue(configMap.toList().first().second.toString().contains(other = resource.metadata.name))
+
+        assertEquals(resource, configMap.elementAt(0).second)
+    }
+
+    @Test
+    fun testLoadFileSystem(@TempDir tempDir: Path) {
+        copyTestResourceFile("test-deployment.yaml", tempDir)
+
+        val resourceSet = FileSystemResourceSet()
+        resourceSet.path = tempDir.toString()
+        resourceSet.files = listOf("test-deployment.yaml")
+        assertEquals(1, resourceSet.getResourceSet(server.client).size)
+
+        val yamlString =
+            """
+            fileSystem:    
+                path: ${resourceSet.path}
+                files:
+                    - test-deployment.yaml
+            """
+
+        val resourcesSet: ResourceSets = objectMapper.readValue(yamlString, ResourceSets::class.java)
+        assertTrue(resourcesSet.fileSystem != null)
+        assertTrue(resourcesSet.configMap == null)
+
+        val fileSystem = resourcesSet.loadResourceSet(server.client)
+        assertEquals(1, fileSystem.size)
+        assertTrue(fileSystem.elementAt(0).second is HasMetadata)
+    }
+
+    @Test
+    fun testEmptyResourceSets() {
+        val resourceSet = ResourceSets()
+
+        assertThrows<DeploymentFailedException> {
+            resourceSet.loadResourceSet(server.client)
+        }
+    }
+}
\ No newline at end of file
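
The new ResourceSetsTest exercises both ways of declaring benchmark resources (configMap and fileSystem). For reference, a sketch of loading manifests from the file system programmatically, assuming FileSystemResourceSet.getResourceSet returns (file name, resource) pairs as the assertions above imply; the path, file name, and client construction are illustrative:

    import io.fabric8.kubernetes.client.DefaultKubernetesClient
    import theodolite.benchmark.FileSystemResourceSet

    fun listSutResources(): List<String> {
        val resourceSet = FileSystemResourceSet()
        resourceSet.path = "./benchmark-resources"
        resourceSet.files = listOf("test-deployment.yaml")
        return DefaultKubernetesClient().use { client ->
            resourceSet.getResourceSet(client).map { (fileName, resource) ->
                "$fileName -> ${resource.metadata.name}"
            }
        }
    }
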
diff --git a/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
index cbddbfbfc5d6f838677c6d04b0a0c79f59d8bc66..d6841429166d1549e84ad27887fbf0cba86b174d 100644
--- a/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
+++ b/theodolite/src/test/kotlin/theodolite/execution/operator/BenchmarkCRDummy.kt
@@ -20,11 +20,12 @@ class BenchmarkCRDummy(name: String) {
         kafkaConfig.bootstrapServer = ""
         kafkaConfig.topics = emptyList()
 
+
         benchmarkCR.spec = benchmark
         benchmarkCR.metadata.name = name
         benchmarkCR.kind = "Benchmark"
         benchmarkCR.apiVersion = "v1"
-
+        benchmark.waitForResourcesEnabled = false
 
         benchmark.infrastructure = Resources()
         benchmark.sut = Resources()
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/AbstractPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/AbstractPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..05bb9588a2de5656b9c0b39d16d2160f691bbe91
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/AbstractPatcherTest.kt
@@ -0,0 +1,101 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.*
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder
+import io.fabric8.kubernetes.api.model.apps.StatefulSet
+import io.fabric8.kubernetes.api.model.apps.StatefulSetBuilder
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.Test
+import theodolite.util.PatcherDefinition
+
+@QuarkusTest
+abstract class AbstractPatcherTest {
+
+    lateinit var resource: List<HasMetadata>
+    lateinit var patcher: Patcher
+    lateinit var value: String
+
+    fun createDeployment(): HasMetadata {
+        return DeploymentBuilder()
+            .withNewMetadata()
+                .withName("dummy")
+            .endMetadata()
+            .withNewSpec()
+                .withNewSelector()
+                    .withMatchLabels<String, String>(mapOf("labelName" to "labelValue"))
+                .endSelector()
+                    .withNewTemplate()
+                        .withNewMetadata()
+                            .withLabels<String, String>(mapOf("labelName" to "labelValue"))
+                        .endMetadata()
+                        .withNewSpec()
+                        .withContainers(
+                                ContainerBuilder()
+                                    .withName("container")
+                                    .withImage("test-image")
+                                    .build())
+                            .addNewVolume()
+                                .withName("test-volume")
+                                .withNewConfigMap()
+                                    .withName("test-configmap")
+                                .endConfigMap()
+                            .endVolume()
+                        .endSpec()
+                .endTemplate()
+            .endSpec()
+            .build()
+    }
+
+    fun createStateFulSet(): HasMetadata {
+        return StatefulSetBuilder()
+            .withNewMetadata()
+                .withName("dummy")
+            .endMetadata()
+            .withNewSpec()
+                .withNewSelector()
+                    .withMatchLabels<String, String>(mapOf("labelName" to "labelValue"))
+                .endSelector()
+                .withNewTemplate()
+                    .withNewMetadata()
+                        .withLabels<String, String>(mapOf("labelName" to "labelValue"))
+                    .endMetadata()
+                    .withNewSpec()
+                    .addNewVolume()
+                        .withName("test-volume")
+                            .withNewConfigMap()
+                                .withName("test-configmap")
+                            .endConfigMap()
+                        .endVolume()
+                    .endSpec()
+                .endTemplate()
+            .endSpec()
+            .build()
+    }
+
+    fun createService(): HasMetadata {
+        return ServiceBuilder()
+            .withNewMetadata()
+            .withName("dummy")
+            .endMetadata()
+            .build()
+    }
+
+    fun createConfigMap(): HasMetadata {
+        return ConfigMapBuilder()
+            .withNewMetadata()
+                .withName("dummy")
+            .endMetadata()
+            .withData<String, String>(mapOf("application.properties" to "propA = valueA"))
+            .build()
+    }
+
+    fun patch() {
+        resource = patcher.patch(resource, value)
+    }
+
+    @Test
+    abstract fun validate()
+
+
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..e55767ea79f1925a3825aca11eb74a8641c17a90
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/DataVolumeLoadGeneratorReplicaPatcherTest.kt
@@ -0,0 +1,28 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class DataVolumeLoadGeneratorReplicaPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = VolumesConfigMapPatcher((resource.first() as Deployment).spec.template.spec.volumes[0].configMap.name)
+        value = "new-configMapName"
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assert((it as Deployment).spec.template.spec.volumes[0].configMap.name == value)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/EnvVarPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/EnvVarPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..cc46347acf5b005ed05170fe27a40de3ca69599d
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/EnvVarPatcherTest.kt
@@ -0,0 +1,35 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.EnvVar
+import io.fabric8.kubernetes.api.model.EnvVarBuilder
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+import org.junit.jupiter.api.BeforeEach
+
+@QuarkusTest
+internal class EnvVarPatcherTest : AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = EnvVarPatcher(variableName = "testEnv", container = "container")
+        value = "testValue"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        val envVar = EnvVarBuilder().withName("testEnv").withValue("testValue").build()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.template.spec.containers[0].env.contains(envVar))
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/ImagePatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/ImagePatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..6592f65934716bfd74d562c5a3fb52ddb40c8b86
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/ImagePatcherTest.kt
@@ -0,0 +1,32 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class ImagePatcherTest: AbstractPatcherTest(){
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = ImagePatcher(container = "container")
+        value = "testValue"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.template.spec.containers[0].image == value)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/LabelPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/LabelPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..64583e4be67282543a44d09b36e657eede2f9eac
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/LabelPatcherTest.kt
@@ -0,0 +1,37 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class LabelPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = LabelPatcher("labelName")
+        value = "labelValue"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).metadata.labels.containsKey("labelName"))
+            assertTrue(it.metadata.labels["labelName"] == "labelValue")
+        }
+    }
+
+    @Test
+    fun getVariableName() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/MatchLabelPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/MatchLabelPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..796f2ffeb6f4c22b9d00218b91f0fbe2ee9f6567
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/MatchLabelPatcherTest.kt
@@ -0,0 +1,37 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class MatchLabelPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = MatchLabelPatcher("labelName")
+        value = "labelValue"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.selector.matchLabels.containsKey("labelName"))
+            assertTrue(it.spec.selector.matchLabels["labelName"] == "labelValue")
+        }
+    }
+
+    @Test
+    fun getVariableName() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/NamePatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/NamePatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..5ae75c5b0dca038b8e351683bfd0ee2a40d217eb
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/NamePatcherTest.kt
@@ -0,0 +1,33 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class NamePatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = NamePatcher()
+        value = "newName"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            // the NamePatcher is expected to rename the resource itself
+            assertEquals(value, it.metadata.name)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/NodeSelectorPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/NodeSelectorPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..a042faf8484eb0ce7e1e21a6069be2beeff0b693
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/NodeSelectorPatcherTest.kt
@@ -0,0 +1,36 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+import org.mockito.kotlin.reset
+
+@QuarkusTest
+internal class NodeSelectorPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = NodeSelectorPatcher("nodeName")
+        value = "nodeValue"
+
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.template.spec.nodeSelector.containsKey("nodeName"))
+            assertTrue(it.spec.template.spec.nodeSelector["nodeName"] == value)
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..6b2a639285b134c0efcaaf5b296f18779f4f8322
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/NumNestedGroupsLoadGeneratorReplicaPatcherTest.kt
@@ -0,0 +1,32 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class NumNestedGroupsLoadGeneratorReplicaPatcherTest : AbstractPatcherTest(){
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = NumNestedGroupsLoadGeneratorReplicaPatcher("10", "500")
+        value = "2"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.replicas == 1)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..50d122e60104ad0239824e5b4471ade8b3ff7bfb
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/NumSensorsLoadGeneratorReplicaPatcherTest.kt
@@ -0,0 +1,32 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class NumSensorsLoadGeneratorReplicaPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = NumSensorsLoadGeneratorReplicaPatcher("10")
+        value = "2"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.replicas == 1)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/PatcherDefinitionFactoryTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/PatcherDefinitionFactoryTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..4696e646726f9ed6ad3e4c5cda1631ded42930e4
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/PatcherDefinitionFactoryTest.kt
@@ -0,0 +1,22 @@
+package theodolite.patcher
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+internal class PatcherDefinitionFactoryTest {
+
+    @BeforeEach
+    fun setUp() {
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    fun createPatcherDefinition() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/PatcherFactoryTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/PatcherFactoryTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..1c3ecffa06f91d1d6c87706bb3fb28e94c414c35
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/PatcherFactoryTest.kt
@@ -0,0 +1,17 @@
+package theodolite.patcher
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+
+import org.junit.jupiter.api.Assertions.*
+
+internal class PatcherFactoryTest {
+
+    @BeforeEach
+    fun setUp() {
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/ReplicaPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/ReplicaPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..0e01dead66509e40237952e4d65ea3a377943c5b
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/ReplicaPatcherTest.kt
@@ -0,0 +1,32 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class ReplicaPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = ReplicaPatcher()
+        value = "5"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.replicas == 5)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/ResourceLimitPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/ResourceLimitPatcherTest.kt
index 2769f2fef607a03d820b0821969db98894944cb3..b794ec6ed983dba92526aff67ecb3cab915871eb 100644
--- a/theodolite/src/test/kotlin/theodolite/patcher/ResourceLimitPatcherTest.kt
+++ b/theodolite/src/test/kotlin/theodolite/patcher/ResourceLimitPatcherTest.kt
@@ -1,5 +1,6 @@
 package theodolite.patcher
 
+import io.fabric8.kubernetes.api.model.HasMetadata
 import io.fabric8.kubernetes.client.server.mock.KubernetesServer
 import io.quarkus.test.junit.QuarkusTest
 import io.quarkus.test.kubernetes.client.KubernetesTestServer
@@ -7,7 +8,6 @@ import io.quarkus.test.kubernetes.client.WithKubernetesTestServer
 import org.junit.jupiter.api.Assertions.assertTrue
 import org.junit.jupiter.api.Disabled
 import org.junit.jupiter.api.Test
-import theodolite.patcher.PatcherFactory
 import theodolite.util.PatcherDefinition
 
 /**
@@ -25,8 +25,6 @@ import theodolite.util.PatcherDefinition
 @Disabled
 class ResourceLimitPatcherTest {
 
-    val patcherFactory = PatcherFactory()
-
     @KubernetesTestServer
     private lateinit var server: KubernetesServer
 
@@ -51,15 +49,8 @@ class ResourceLimitPatcherTest {
             "container" to "uc-application"
         )
 
-        patcherFactory.createPatcher(
-            patcherDefinition = defCPU,
-            k8sResources = listOf(Pair("/cpu-memory-deployment.yaml", k8sResource))
-        ).patch(value = cpuValue)
-
-        patcherFactory.createPatcher(
-            patcherDefinition = defMEM,
-            k8sResources = listOf(Pair("/cpu-memory-deployment.yaml", k8sResource))
-        ).patch(value = memValue)
+        PatchHandler.patchResource(mutableMapOf(Pair("cpu-memory-deployment.yaml", listOf(k8sResource as HasMetadata))), defCPU, cpuValue)
+        PatchHandler.patchResource(mutableMapOf(Pair("cpu-memory-deployment.yaml", listOf(k8sResource as HasMetadata))), defMEM, memValue)
 
         k8sResource.spec.template.spec.containers.filter { it.name == defCPU.properties["container"]!! }
             .forEach {
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/ResourceRequestPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/ResourceRequestPatcherTest.kt
index dba91eb65d4474d38f64d7fdd7f7ab981f8eb30f..300397a96abd34f17f1a4a3d2b3c76e2d9da13ea 100644
--- a/theodolite/src/test/kotlin/theodolite/patcher/ResourceRequestPatcherTest.kt
+++ b/theodolite/src/test/kotlin/theodolite/patcher/ResourceRequestPatcherTest.kt
@@ -25,8 +25,6 @@ class ResourceRequestPatcherTest {
     @KubernetesTestServer
     private lateinit var server: KubernetesServer
 
-    val patcherFactory = PatcherFactory()
-
     fun applyTest(fileName: String) {
         val cpuValue = "50m"
         val memValue = "3Gi"
@@ -48,14 +46,8 @@ class ResourceRequestPatcherTest {
             "container" to "application"
         )
 
-        patcherFactory.createPatcher(
-            patcherDefinition = defCPU,
-            k8sResources = listOf(Pair("/cpu-memory-deployment.yaml", k8sResource))
-        ).patch(value = cpuValue)
-        patcherFactory.createPatcher(
-            patcherDefinition = defMEM,
-            k8sResources = listOf(Pair("/cpu-memory-deployment.yaml", k8sResource))
-        ).patch(value = memValue)
+        PatchHandler.patchResource(mutableMapOf(Pair("/cpu-memory-deployment.yaml", listOf(k8sResource))), defCPU, cpuValue)
+        PatchHandler.patchResource(mutableMapOf(Pair("/cpu-memory-deployment.yaml", listOf(k8sResource))), defMEM, memValue)
 
         k8sResource.spec.template.spec.containers.filter { it.name == defCPU.properties["container"]!! }
             .forEach {
@@ -87,4 +79,4 @@ class ResourceRequestPatcherTest {
         // Case 4: In the given YAML declaration neither `Resource Request` nor `Request Limit` is defined
         applyTest("/no-resources-deployment.yaml")
     }
-}
+}
\ No newline at end of file
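
Both resource-patcher tests now go through the PatchHandler.patchResource entry point instead of a PatcherFactory instance. A sketch of how a patcher definition might be built and applied outside the tests, assuming PatcherDefinition exposes type, resource and properties as in the benchmark CRD (only the properties map is visible in this diff, so the other fields and the property keys are assumptions); values are illustrative:

    import io.fabric8.kubernetes.api.model.HasMetadata
    import theodolite.patcher.PatchHandler
    import theodolite.util.PatcherDefinition

    fun patchCpuRequest(resources: MutableMap<String, List<HasMetadata>>) {
        val definition = PatcherDefinition()
        definition.type = "ResourceRequestPatcher"           // assumed field
        definition.resource = "/cpu-memory-deployment.yaml"  // assumed field
        definition.properties = mapOf(                       // property keys assumed from the patcher constructor
            "container" to "uc-application",
            "requestedResource" to "cpu"
        )
        PatchHandler.patchResource(resources, definition, "500m")
    }
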
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/SchedulerNamePatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/SchedulerNamePatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..8b6f3e9b1371bce3e17cbbc6e399d425baffd699
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/SchedulerNamePatcherTest.kt
@@ -0,0 +1,32 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class SchedulerNamePatcherTest : AbstractPatcherTest(){
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = SchedulerNamePatcher()
+        value = "testScheduler"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.template.spec.schedulerName == value)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/ServiceSelectorPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/ServiceSelectorPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..caffb7eaf37c3930dbbb8a043ccd1cb7bbfd8d74
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/ServiceSelectorPatcherTest.kt
@@ -0,0 +1,22 @@
+package theodolite.patcher
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+internal class ServiceSelectorPatcherTest {
+
+    @BeforeEach
+    fun setUp() {
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    fun patch() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/TemplateLabelPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/TemplateLabelPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..ebfe147e7b503defe14439fb1b954b9dd269ea3e
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/TemplateLabelPatcherTest.kt
@@ -0,0 +1,34 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class TemplateLabelPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = TemplateLabelPatcher("labelName")
+        value = "labelValue"
+    }
+
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            assertTrue((it as Deployment).spec.template.metadata.labels.containsKey("labelName"))
+            assertTrue(it.spec.template.metadata.labels["labelName"] == "labelValue")
+        }
+    }
+
+    @Test
+    fun getVariableName() {
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/kotlin/theodolite/patcher/VolumesConfigMapPatcherTest.kt b/theodolite/src/test/kotlin/theodolite/patcher/VolumesConfigMapPatcherTest.kt
new file mode 100644
index 0000000000000000000000000000000000000000..5628fd7c50336dc620dec79945c69fd9856a9c91
--- /dev/null
+++ b/theodolite/src/test/kotlin/theodolite/patcher/VolumesConfigMapPatcherTest.kt
@@ -0,0 +1,33 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.apps.Deployment
+import io.quarkus.test.junit.QuarkusTest
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+import org.junit.jupiter.api.Assertions.*
+
+@QuarkusTest
+internal class VolumesConfigMapPatcherTest: AbstractPatcherTest() {
+
+    @BeforeEach
+    fun setUp() {
+        resource = listOf(createDeployment())
+        patcher = VolumesConfigMapPatcher("test-configmap")
+        value = "patchedVolumeName"
+    }
+
+    @AfterEach
+    fun tearDown() {
+    }
+
+    @Test
+    override fun validate() {
+        patch()
+        resource.forEach {
+            // the ConfigMap-backed volume must now reference the patched name
+            assertTrue((it as Deployment).spec.template.spec.volumes[0].configMap.name == value)
+        }
+    }
+}
\ No newline at end of file
diff --git a/theodolite/src/test/resources/k8s-resource-files/test-benchmark.yaml b/theodolite/src/test/resources/k8s-resource-files/test-benchmark.yaml
index ea9ee8471d3da1dc6011348bd978696bd0fa6f36..102a6a249ab06301396eaf375e7bd2590b334b22 100644
--- a/theodolite/src/test/resources/k8s-resource-files/test-benchmark.yaml
+++ b/theodolite/src/test/resources/k8s-resource-files/test-benchmark.yaml
@@ -3,6 +3,7 @@ kind: benchmark
 metadata:
   name: example-benchmark
 spec:
+  waitForResourcesEnabled: false
   sut:
     resources:
       - configMap: